コード例 #1
0
    def prepare_request(self, request):
        """Build a :class:`PreparedRequest` by merging *request* with this
        session's defaults (headers, params, auth, cookies, hooks).

        :param request: the :class:`Request` instance to prepare.
        :return: a fully prepared :class:`PreparedRequest`.
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar: promote a plain dict so both cookie sources
        # can be merged jar-to-jar below.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Session cookies go in first so request-level cookies override
        # session-level ones on key collision.
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers,
                                  self.headers,
                                  dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(request.auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p
コード例 #2
0
ファイル: up9lib.py プロジェクト: haiut/sample01
def merge_cookies_into_session(cookies_input):
    """Merge *cookies_input* into the context session's cookie jar.

    Accepts either a list of cookie dicts (name/value/domain/path/secure)
    or a '; '-joined cookie header string.
    """
    jar = _context.session.cookies
    if isinstance(cookies_input, list):
        for item in cookies_input:
            domain = item['domain']
            # Build a full http.cookiejar.Cookie; keyword arguments make
            # the long constructor signature readable.
            cookie = Cookie(
                version=0,
                name=item['name'],
                value=item['value'],
                port=None,
                port_specified=False,
                domain=domain,
                domain_specified=True,
                domain_initial_dot=domain.startswith("."),
                path=item['path'],
                path_specified=True,
                secure=item['secure'],
                expires=None,
                discard=False,
                comment="",
                comment_url="",
                rest={},
            )
            logging.debug("Set cookie into context: %r", cookie)
            jar.set_cookie(cookie)
    else:
        # Header-style string: parse name=value pairs and merge as a dict.
        attrs_set = parse_ns_headers(cookies_input.split('; '))
        as_dict = {pairs[0][0]: pairs[0][1] for pairs in attrs_set}
        merge_cookies(jar, cookiejar_from_dict(as_dict))
コード例 #3
0
ファイル: session.py プロジェクト: deepbrook/bitex-framework
    def prepare_request(self, request: BitexRequest) -> BitexPreparedRequest:
        """Prepare a :class:`BitexPreparedRequest` object for transmission.

        Extends :class:`requests.Session.prepare_request`: plugins reported
        by :data:`bitex.list_loaded_plugins` may supply custom
        :class:`BitexPreparedRequest` and auth classes for the exchange
        named on *request*.
        """
        raw_cookies = request.cookies or {}

        # Normalize to a CookieJar before merging.
        if not isinstance(raw_cookies, cookielib.CookieJar):
            raw_cookies = cookiejar_from_dict(raw_cookies)

        # Session cookies are applied first so request cookies win.
        jar = merge_cookies(RequestsCookieJar(), self.cookies)
        jar = merge_cookies(jar, raw_cookies)

        # Fall back to netrc credentials when nothing was set explicitly.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        # Swap in plugin-provided classes for this exchange, if any.
        plugin_classes = list_loaded_plugins().get(request.exchange, None)
        if plugin_classes:
            prepared = plugin_classes["PreparedRequest"](request.exchange)
            # Only install the plugin auth class when the user supplied no
            # auth of their own, so explicit auth objects passed to
            # self.request are never clobbered.
            # NOTE(review): this assignment persists on the session across
            # subsequent requests -- confirm that is intended.
            if not self.auth and request.private:
                self.auth = plugin_classes["Auth"](self.key, self.secret)
        else:
            prepared = BitexPreparedRequest(request.exchange)
        prepared.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers,
                                  self.headers,
                                  dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=jar,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return prepared
コード例 #4
0
    def set_token(self):
        """Fetch a token response and merge its cookies into the session.

        :return: result dict built by :func:`return_result`.
        """
        from requests import cookies

        token_response = self.fetch_token()
        if not token_response:
            return return_result(4000, [], desc=u'获取 token 网络错误')
        if token_response.status_code != 200:
            return return_result(4000, [],
                                 desc=u'获取 token : {}'.format(
                                     token_response.status_code))
        try:
            info = token_response.json()
        except ValueError:
            # Narrowed from a bare ``except:``; ``.json()`` raises
            # ValueError (JSONDecodeError) on a non-JSON body.
            return return_result(4000, [token_response.text],
                                 desc=u'获取 token 网络错误')
        # NOTE(review): success is keyed on the *presence* of
        # 'error.unauthorized' in the body, which looks inverted.
        # Preserved as-is; confirm against the upstream API.
        if 'error.unauthorized' in token_response.text:
            self.cookies = cookies.merge_cookies(
                self.cookies, token_response.cookies)
            return return_result(2000, [info], desc=u'获取 token 成功')
        return return_result(5500, [token_response.text],
                             desc=u'获取 token 网络错误')
コード例 #5
0
def _extract_cookies(request, response, cookies):
    """Populate ``response.cookies`` for a mocked response.

    Real responses get their cookies parsed out of the original
    httplib.HTTPMessage headers; since one is never created here, the
    extraction step is reproduced manually.
    """
    # Pull cookies out of any Set-Cookie / Set-Cookie2 header that was
    # provided (a single header can only carry one cookie).
    fake_message = compat._FakeHTTPMessage(response.headers)
    mock_response = MockResponse(fake_message)
    response.cookies.extract_cookies(mock_response, MockRequest(request))

    # A CookieJar or plain dict passed explicitly lets callers attach any
    # number of additional cookies.
    if cookies:
        merge_cookies(response.cookies, cookies)
コード例 #6
0
ファイル: response.py プロジェクト: bdrich/neutron-lbaas
def _extract_cookies(request, response, cookies):
    """Attach cookies to *response*.

    requests normally extracts cookies from the headers of the
    original_response httplib.HTTPMessage, which is never constructed
    here, so the same step is performed by hand.
    """
    # Cookies set via a Set-Cookie/Set-Cookie2 header (limited to one
    # cookie per header) are extracted through mock wrappers.
    response.cookies.extract_cookies(
        MockResponse(compat._FakeHTTPMessage(response.headers)),
        MockRequest(request),
    )

    # An explicit CookieJar/dict argument allows more than one cookie
    # to be set on the response.
    if cookies:
        merge_cookies(response.cookies, cookies)
コード例 #7
0
    def update_session(self, alias, headers=None, cookies=None):
        """Update a cached HTTP session's headers and cookies in place.

        The session is looked up by its ``alias`` name; the ``headers``
        and ``cookies`` dictionaries are merged into the stored session
        data.
        """
        target = self._cache.switch(alias)
        target.headers = merge_setting(headers, target.headers)
        target.cookies = merge_cookies(target.cookies, cookies)
コード例 #8
0
    def update_session(self, alias, headers=None, cookies=None):
        """Update a cached HTTP session's headers and cookies.

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of headers merged into the session

        ``cookies`` Dictionary of cookies merged into the session jar
        """
        session = self._cache.switch(alias)
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
コード例 #9
0
    def update_session(self, alias, headers=None, cookies=None):
        """Update a cached HTTP session's headers and cookies.

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of headers merged into the session

        ``cookies`` Dictionary of cookies merged into the session jar
        """
        session = self._cache.switch(alias)
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
コード例 #10
0
ファイル: sync_requests.py プロジェクト: tinybees/feshttp
    def prepare_request(self, request):
        """Construct and return a :class:`PreparedRequest <PreparedRequest>`
        whose settings come from the :class:`Request <Request>` instance
        merged with this :class:`Session`'s defaults.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        """
        req_cookies = request.cookies or {}

        # Promote a plain dict to a CookieJar before merging.
        if not isinstance(req_cookies, cookielib.CookieJar):
            req_cookies = cookiejar_from_dict(req_cookies)

        # Session-level cookies first; request-level cookies override them.
        jar = merge_cookies(RequestsCookieJar(), self.cookies)
        jar = merge_cookies(jar, req_cookies)

        # Fall back to netrc for basic auth when none was given explicitly.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        prepared = CustomPreparedRequest()
        prepared.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers,
                                  self.headers,
                                  dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=jar,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return prepared
コード例 #11
0
ファイル: request.py プロジェクト: loveallufev/crawler
    def send(self, url, allowRedirect = True, files=None, stream=False, format=None):
        """Issue a GET or POST for *url* through the shared session.

        POST is chosen when ``self.content`` is set, GET otherwise.
        Cookies gathered during the exchange are merged back onto the
        instance and, when a cookie file is in use, saved to disk.
        """
        # Default the Referer/Host headers to the request's network
        # location on first use.
        if not self.referer:
            self.referer = urlsplit(url).netloc
        if 'Referer' not in self.header:
            self.header['Referer'] = self.referer
        if 'Host' not in self.header:
            self.header['Host'] = self.referer

        # Load persisted cookies once, before the first request.
        if self.__useCookieFile and not self.__session.cookies:
            self.cookies = self.__loadCookie(self.cookiePath)

        requestType = "POST" if self.content else "GET"
        if self.userAgent:
            self.header['User-Agent'] = self.userAgent

        result = "EMPTY"
        meta_param = {
            "url": url, "params": self.payload,
            "cookies": self.cookies, "headers": self.header,
            "allow_redirects": allowRedirect, "timeout": self.timeout,
            "stream": stream
        }

        if requestType == "GET":
            result = self.__session.get(**meta_param)
        elif requestType == "POST":
            if files:
                meta_param['files'] = files
            # JSON body only when explicitly requested; form data otherwise.
            if format and format.lower() == 'json':
                meta_param['json'] = self.content
            else:
                meta_param['data'] = self.content
            result = self.__session.post(**meta_param)

        # Keep instance cookies in sync with the session jar.
        if not self.cookies:
            self.cookies = self.__session.cookies
        else:
            self.cookies = merge_cookies(self.cookies, self.__session.cookies)

        if self.__useCookieFile:
            self.__saveCookie(self.cookies, self.cookiePath)

        return result
コード例 #12
0
    def fetch_session(self):
        """Request a session and merge its cookies into ``self.cookies``.

        :return: result dict built by :func:`returnResult`.
        """
        from requests import cookies

        session_response = self.session_request()
        if not session_response:
            return returnResult(4000, [], desc=u'获取 session 网络错误')
        if session_response.status_code != 200:
            # BUG FIX: the result was built but never returned, so callers
            # received None on a non-200 status.
            return returnResult(4000, [],
                                desc=u'获取 session 网络错误: {}'.format(
                                    session_response.status_code))
        try:
            self.cookies = cookies.merge_cookies(
                self.cookies, session_response.cookies)
            return returnResult(2000, [], desc=u'获取 session 成功')
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return returnResult(4100, [], desc=u'获取 session 解析错误')
コード例 #13
0
    def getmsgcode(self):
        """Request an SMS verification code and refresh stored cookies."""
        from requests import cookies
        url = "http://service.js.10086.cn/my/sms.do"
        form_data = {"busiNum": "QDCX"}
        # The service validates these headers, so they are set per call.
        self.headers["Referer"] = "http://service.js.10086.cn/my/MY_QDCX.html?t=1490249727911"
        self.headers["Origin"] = "http://service.js.10086.cn"
        self.headers["Host"] = "service.js.10086.cn"

        options = {
            'method': 'post',
            'url': url,
            'form': form_data,
            'params': None,
            'cookies': self.cookies,
            'headers': self.headers,
            'timeout': 30
        }
        response = Request.basic(options)
        if not response:
            return return_result(code=4000,
                                 data=None,
                                 desc=u"{}用户短信验证码请求失败!!!".format(self.phone_num))
        if response.status_code != 200:
            return return_result(code=4000,
                                 data=None,
                                 desc=u"{}用户短信验证码请求失败!!!".format(self.phone_num))
        result_dict = json.loads(response.content)
        # Exact `== True` comparison kept to preserve the original semantics.
        if result_dict['success'] == True:
            self.cookies = cookies.merge_cookies(self.cookies,
                                                 response.cookies)
            self.cookies = dict_from_cookiejar(self.cookies)
            return return_result(code=2000,
                                 data=self.cookies,
                                 desc=u"获取短信验证码成功")
        return return_result(code=4000,
                             data=None,
                             desc=u"获取短信验证码失败!!!")
コード例 #14
0
    def set_login_cookie(self, login_url):
        """Hit the login URL and merge the redirect's cookies into the session.

        :param login_url: URL to request; a 302 carrying cookies is success.
        :return: result dict built by :func:`returnResult`.
        """
        from requests import cookies

        login_url_response = self.login_url_request(login_url)
        if not login_url_response:
            return returnResult(4000, [], desc=u'登陆网址网络错误')
        if login_url_response.status_code != 302:
            # BUG FIX: the error result was built but never returned,
            # leaving callers with None on an unexpected status code.
            return returnResult(4000, [],
                                desc=u'登陆网址网络错误: {}'.format(
                                    login_url_response.status_code))
        if not login_url_response.cookies:
            return returnResult(4100, [], desc=u'cookie 获取错误')
        try:
            self.cookies = cookies.merge_cookies(
                self.cookies, login_url_response.cookies)
            return returnResult(2000, [], desc=u'登陆网址成功')
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return returnResult(4100, [], desc=u'无法获取 cookie')
コード例 #15
0
    def login_handle(self, referer_url):
        """Evaluate the login request's response and merge cookies on success.

        :param referer_url: sequence whose first element is the request arg.
        :return: result dict.
        """
        from requests import cookies

        login_response = self.login_request(referer_url[0])
        if not login_response:
            return return_result(4000, [], desc=u'登陆请求网络错误')
        if login_response.status_code != 200:
            return return_result(4000, [],
                                 desc=u'登陆请求: {}'.format(
                                     login_response.status_code))
        try:
            info = login_response.json()
            msg = info['failMsg']
        except Exception as _:
            # Non-JSON body or missing failMsg: surface the raw text.
            return return_result(4100, [], desc=login_response.text)
        if 'returnCode' not in info:
            return return_result(5500, [], desc=info)
        code = info['returnCode']
        if code == '1000':
            # Successful login: fold the response cookies into our jar.
            self.cookies = cookies.merge_cookies(
                self.cookies, login_response.cookies)
            return return_result(2000, [],
                                 desc=u'登陆请求: {}'.format(msg))
        if code == '9080010007':
            return return_result(4610, [],
                                 desc=u'登陆请求: {}'.format(msg))
        return return_result(4000, [info],
                             desc=u'登陆请求: {}'.format(msg))
コード例 #16
0
ファイル: client.py プロジェクト: bennr01/treq
    def request(
        self,
        method,
        url,
        *,
        params=None,
        headers=None,
        data=None,
        files=None,
        json=_NOTHING,
        auth=None,
        cookies=None,
        allow_redirects=True,
        browser_like_redirects=False,
        unbuffered=False,
        reactor=None,
        timeout=None,
        _stacklevel=2,
    ):
        """
        Issue an HTTP request and return a Deferred firing with the response.

        See :func:`treq.request()` for full parameter documentation.
        """
        # HTTP methods are case-insensitive ASCII tokens on the wire.
        method = method.encode('ascii').upper()

        if isinstance(url, DecodedURL):
            parsed_url = url.encoded_url
        elif isinstance(url, EncodedURL):
            parsed_url = url
        elif isinstance(url, str):
            # We use hyperlink in lazy mode so that users can pass arbitrary
            # bytes in the path and querystring.
            parsed_url = EncodedURL.from_text(url)
        else:
            # Fall-through: treat the value as raw bytes and decode first.
            parsed_url = EncodedURL.from_text(url.decode('ascii'))

        # Join parameters provided in the URL
        # and the ones passed as argument.
        if params:
            parsed_url = parsed_url.replace(
                query=parsed_url.query + tuple(_coerced_query_params(params)))

        url = parsed_url.to_uri().to_text().encode('ascii')

        headers = self._request_headers(headers, _stacklevel + 1)

        # _request_body selects a body producer and, when appropriate, a
        # Content-Type based on which of data/files/json was supplied.
        bodyProducer, contentType = self._request_body(data,
                                                       files,
                                                       json,
                                                       stacklevel=_stacklevel +
                                                       1)
        if contentType is not None:
            headers.setRawHeaders(b'Content-Type', [contentType])

        # Accept either a CookieJar or a plain dict of cookies.
        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Per-request cookies are merged into the client jar; CookieAgent
        # sends and collects cookies through this combined jar.
        cookies = merge_cookies(self._cookiejar, cookies)
        wrapped_agent = CookieAgent(self._agent, cookies)

        if allow_redirects:
            if browser_like_redirects:
                wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
            else:
                wrapped_agent = RedirectAgent(wrapped_agent)

        # Transparently decompress gzip-encoded response bodies.
        wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                            [(b'gzip', GzipDecoder)])

        if auth:
            wrapped_agent = add_auth(wrapped_agent, auth)

        d = wrapped_agent.request(method,
                                  url,
                                  headers=headers,
                                  bodyProducer=bodyProducer)

        if reactor is None:
            from twisted.internet import reactor
        if timeout:
            # Cancel the request when the timeout fires; cancel the timer
            # instead if a result (or failure) arrives first.
            delayedCall = reactor.callLater(timeout, d.cancel)

            def gotResult(result):
                if delayedCall.active():
                    delayedCall.cancel()
                return result

            d.addBoth(gotResult)

        if not unbuffered:
            d.addCallback(_BufferedResponse)

        return d.addCallback(_Response, cookies)
コード例 #17
0
    def request(self, method, url, **kwargs):
        """Issue *method* on *url* via a stack of twisted agents.

        Recognized kwargs: ``params``, ``headers``, ``data``, ``files``,
        ``cookies``, ``auth``, ``allow_redirects``, ``timeout``,
        ``reactor``, ``unbuffered``.

        :return: a Deferred firing with a wrapped response.
        """
        method = method.upper()

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.get('params')
        if params:
            url = _combine_query_params(url, params)

        # Convert headers dictionary to
        # twisted raw headers format.
        headers = kwargs.get('headers')
        if headers:
            if isinstance(headers, dict):
                h = Headers({})
                # BUG FIX: iteritems() is Python 2 only; items() behaves
                # identically there and also works on Python 3.
                for k, v in headers.items():
                    if isinstance(v, str):
                        h.addRawHeader(k, v)
                    else:
                        h.setRawHeaders(k, v)

                headers = h
        else:
            headers = Headers({})

        # Here we choose a right producer
        # based on the parameters passed in.
        bodyProducer = None
        data = kwargs.get('data')
        files = kwargs.get('files')
        if files:
            # If the files keyword is present we will issue a
            # multipart/form-data request as it suits better for cases
            # with files and/or large objects.
            files = list(_convert_files(files))
            boundary = uuid.uuid4()
            headers.setRawHeaders(
                'content-type', [
                    'multipart/form-data; boundary=%s' % (boundary,)])
            if data:
                data = _convert_params(data)
            else:
                data = []

            bodyProducer = multipart.MultiPartProducer(
                data + files, boundary=boundary)
        elif data:
            # Otherwise stick to x-www-form-urlencoded format
            # as it's generally faster for smaller requests.
            if isinstance(data, (dict, list, tuple)):
                headers.setRawHeaders(
                    'content-type', ['application/x-www-form-urlencoded'])
                data = urlencode(data, doseq=True)
            bodyProducer = IBodyProducer(data)

        cookies = kwargs.get('cookies', {})

        # Accept either a CookieJar or a plain dict of cookies.
        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge per-request cookies into the client jar; CookieAgent
        # sends and collects cookies through the combined jar.
        cookies = merge_cookies(self._cookiejar, cookies)

        wrapped_agent = CookieAgent(self._agent, cookies)

        if kwargs.get('allow_redirects', True):
            wrapped_agent = RedirectAgent(wrapped_agent)

        # Transparently decompress gzip-encoded response bodies.
        wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                            [('gzip', GzipDecoder)])

        auth = kwargs.get('auth')
        if auth:
            wrapped_agent = add_auth(wrapped_agent, auth)

        d = wrapped_agent.request(
            method, url, headers=headers,
            bodyProducer=bodyProducer)

        timeout = kwargs.get('timeout')
        if timeout:
            # Cancel the request when the timeout fires; cancel the timer
            # instead if a result arrives first.
            delayedCall = default_reactor(kwargs.get('reactor')).callLater(
                timeout, d.cancel)

            def gotResult(result):
                if delayedCall.active():
                    delayedCall.cancel()
                return result

            d.addBoth(gotResult)

        if not kwargs.get('unbuffered', False):
            d.addCallback(_BufferedResponse)

        return d.addCallback(_Response, cookies)
コード例 #18
0
    def resolve_redirects(self,
                          resp,
                          req,
                          stream=False,
                          timeout=None,
                          verify=True,
                          cert=None,
                          proxies=None,
                          yield_requests=False,
                          **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests.

        Follows each redirect target in turn, rewriting the prepared
        request (URL, method, headers, cookies, auth, proxies) at every
        hop, until no further redirect is found or ``max_redirects`` is
        exceeded. When ``yield_requests`` is true the rebuilt requests are
        yielded instead of being sent.
        """

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            # Consume socket so it can be released
            resp.content

            if self.max_redirects <= len(resp.history):
                raise TooManyRedirects('Exceeded %s redirects.' %
                                       self.max_redirects,
                                       response=resp)

            # Release the connection
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect,
                                        codes.permanent_redirect):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type',
                                  'Transfer-Encoding')
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            # Drop the stale Cookie header; it is rebuilt from the jar below.
            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            prepared_request._cookies.extract_cookies(
                MockResponse(HTTPHeaderDict(resp.headers)), MockRequest(req))
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # Override the original request.
            req = prepared_request
            req.adapt_prepare()

            if yield_requests:
                yield req
            else:
                resp = self.send(req,
                                 stream=stream,
                                 timeout=timeout,
                                 verify=verify,
                                 cert=cert,
                                 proxies=proxies,
                                 allow_redirects=False,
                                 **adapter_kwargs)

                yield resp

                # NOTE(review): send() appears to return a future-like
                # object here -- the response is yielded until done() and
                # then unwrapped via result(). Confirm against the custom
                # send() implementation.
                while not resp.done():
                    yield resp
                resp = resp.result()

                self.cookies.extract_cookies(
                    MockResponse(HTTPHeaderDict(resp.headers)),
                    MockRequest(prepared_request))

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
コード例 #19
0
ファイル: portal.py プロジェクト: Catstyle/applied
 def load_cookies_from_backend(self):
     """Restore session cookies previously pickled to the backend.

     :return: True when cookies were found and merged, False otherwise.
     """
     value = self.backend.get(f'{self.username}.cookies')
     if value is MISSING:
         return False
     # NOTE(review): pickle.loads on backend data is only safe if the
     # backend contents are trusted.
     self.session.cookies = merge_cookies(self.session.cookies,
                                          pickle.loads(value))
     # BUG FIX: previously fell through returning None, making the success
     # path indistinguishable from failure in truthiness checks.
     return True
コード例 #20
0
ファイル: client.py プロジェクト: timothyaaron/aiorequests
    def request(self, method, url, **kwargs):
        """Issue *method* on *url* through aiohttp (generator coroutine).

        Recognized kwargs: ``params``, ``headers``, ``data``, ``files``,
        ``cookies``, ``auth``, ``allow_redirects``, ``timeout``.

        :return: a :class:`_Response` wrapping the aiohttp response.
        """
        method = method.upper()

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.get('params')
        if params:
            url = _combine_query_params(url, params)

        # Convert a headers dict to a list of (key, value) pairs,
        # expanding multi-valued headers.
        headers = kwargs.get('headers')
        if headers:
            _headers = []
            for key, val in headers.items():
                if isinstance(val, list):
                    for v in val:
                        _headers.append((key, v))
                else:
                    _headers.append((key, val))
            headers = _headers
        else:
            headers = {}

        # Here we choose a right producer
        # based on the parameters passed in.
        data = kwargs.get('data')
        files = kwargs.get('files')
        if files:
            # If the files keyword is present we will issue a
            # multipart/form-data request as it suits better for cases
            # with files and/or large objects.

            # TODO: Must check multipart aiohttp support
            files = list(_convert_files(files))
            boundary = uuid.uuid4()
            headers['Content-Type'] = [
                'multipart/form-data; boundary=%s' % (boundary, )
            ]
            if data:
                data = _convert_params(data)
            else:
                data = []
            data += files
        elif data:
            # Otherwise stick to x-www-form-urlencoded format
            # as it's generally faster for smaller requests.
            if isinstance(data, (dict, list, tuple)):
                headers['Content-Type'] = 'application/x-www-form-urlencoded'
                data = urlencode(data, doseq=True)

        cookies = kwargs.get('cookies', {})

        # Accept either a CookieJar or a plain dict of cookies.
        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge per-request cookies into the client-wide jar.
        cookies = merge_cookies(self._cookiejar, cookies)
        allow_redirects = kwargs.get('allow_redirects', True)

        auth = kwargs.get('auth')
        if auth:
            auth = aiohttp.helpers.BasicAuth(*auth)
        else:
            auth = None

        if isinstance(headers, dict):
            headers['accept-encoding'] = 'gzip'
        else:
            headers.append(('accept-encoding', 'gzip'))
        loop = asyncio.get_event_loop()
        timeout = kwargs.get('timeout')

        request_args = {
            'auth': auth,
            'headers': headers,
            'data': data,
            'cookies': cookies if cookies else None
        }

        # Drop unset/empty values so aiohttp's own defaults apply.
        for k in list(request_args.keys()):
            if not request_args[k]:
                request_args.pop(k)

        # BUG FIX: allow_redirects used to be mapped to None when True and
        # then stripped by the falsy filter above even when False, so
        # redirects could never actually be disabled. Pass it explicitly
        # only when it differs from aiohttp's default (True).
        if not allow_redirects:
            request_args['allow_redirects'] = False

        resp = yield from asyncio.wait_for(
            loop.create_task(aiohttp.request(method, url, **request_args)),
            timeout)

        return _Response(resp, cookies)
コード例 #21
0
ファイル: portal.py プロジェクト: Catstyle/applied
 def renew_req(self, req):
     """Refresh the Cookie header on *req* (a PreparedRequest).

     Strips any previously-prepared Cookie header, then rebuilds it from
     the request's jar merged with the session jar.
     """
     req.headers.pop('Cookie', '')
     combined = merge_cookies(req._cookies, self.session.cookies)
     req.prepare_cookies(combined)
     return req
コード例 #22
0
ファイル: obs_ta_idp.py プロジェクト: rtcornwell/splunk
def get_token(IAMurl, UserName, UserPass):
    """Log in to OTC IAM through Azure AD SSO and return a token id.

    Drives the Microsoft online browser login flow step by step
    (credential-type lookup, username/password post, "keep me signed
    in" confirmation), then posts the resulting SAML assertion to the
    OTC IAM federation endpoint.

    :param IAMurl: initial IAM SSO entry url (redirects to Microsoft).
    :param UserName: Azure AD user name (url-quoted before posting).
    :param UserPass: matching password (url-quoted before posting).
    :returns: the ``X-Subject-Token`` header value issued by IAM.

    Calls ``sys.exit(2)`` after logging on any unexpected HTTP status.
    """
    proxies = None
    # These urls may need to be customized depending if you are using
    # standard Azure SSO or customized.
    getCredentialUrl = "https://login.microsoftonline.com/common/GetCredentialType?mkt=zh-CN"
    passwordUrl = "https://login.live.com/ppsecure/post.srf"
    authUrl = "https://login.microsoftonline.com/common/federation/oauth2"
    microUrl = "https://login.microsoftonline.com"

    # Microsoft login pages embed a JSON "$Config" blob inside a CDATA
    # comment; one compiled pattern serves both pages parsed below.
    configPattern = re.compile("//<!\\[CDATA\\[[\\s]+\\$Config=(.*);[\\s]+//\\]\\]>")

    encodeUserName = quote(UserName)
    iamResult = http_get(IAMurl, None, proxies=proxies)

    imaLocationUrl = iamResult.headers.get("Location")
    result = http_get(imaLocationUrl, None, proxies=proxies)
    if result.status_code != 200:
        logging.debug("Error on First IamUrl Call: , errorCode:%s" % result.status_code)
        sys.exit(2)
    responseHeader = result.headers
    UserNameGetUrl = result.request.url
    cookies = result.cookies
    # Match against the decoded text: the compiled pattern is a str
    # pattern, and re raises TypeError when a str pattern is applied to
    # bytes (the previous ``result.text.encode("utf-8")`` broke on
    # Python 3).
    microsoftMatch = configPattern.findall(result.text)
    microsoftMatchJson = json.loads(microsoftMatch[0])

    # Headers for Microsoft's GetCredentialType endpoint; the canary and
    # flow-token values come from the embedded $Config blob.
    UserNamePostHeader = dict()
    UserNamePostHeader.setdefault("hpgrequestid", responseHeader.get("x-ms-request-id"))
    UserNamePostHeader.setdefault("Origin", microUrl)
    UserNamePostHeader.setdefault("canary", microsoftMatchJson.get("apiCanary"))
    UserNamePostHeader.setdefault("client-request-id", microsoftMatchJson.get("correlationId"))
    UserNamePostHeader.setdefault("Content-type", "application/json; charset=UTF-8")
    UserNamePostHeader.setdefault("hpgid", str(microsoftMatchJson.get("hpgid")))
    UserNamePostHeader.setdefault("hpgact", str(microsoftMatchJson.get("hpgact")))
    UserNamePostHeader.setdefault("Referer", UserNameGetUrl)
    UserNamePostHeader.setdefault("Accept", "application/json")
    mircroUrlPost = microsoftMatchJson.get("urlPost")
    bodyGetCredentialType = "{\"UserName\": \"%s\",\"isOtherIdpSupported\": true,\"checkPhones\": false,\"isRemoteNGCSupported\": true,\"isCookieBannerShown\": false,\"isFidoSupported\": false,\"originalRequest\": \"%s\",\"country\":\"DE\",\"forceotclogin\": false,\"flowToken\": \"%s\"}" % (
        encodeUserName, microsoftMatchJson.get("sCtx"), microsoftMatchJson.get("sFT"))

    credentialResponse = http_post(getCredentialUrl, UserNamePostHeader, bodyGetCredentialType, proxies=proxies, cookies=result.cookies)
    if credentialResponse.status_code != 200:
        logging.debug("Error on credential response: , errorCode:%s" % credentialResponse.status_code)
        sys.exit(2)
    merge_cookies(cookies, credentialResponse.cookies)

    # Post the username/password form to the url named by $Config.
    mircroRequestHeader = dict()
    mircroRequestHeader.setdefault("Origin", microUrl)
    mircroRequestHeader.setdefault("Content-type", "application/x-www-form-urlencoded")
    mircroRequestHeader.setdefault("Referer", UserNameGetUrl)
    mircroRequestHeader.setdefault("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3")
    mircroRequestBody = "i13=0&login=%s&loginfmt=%s&type=11&LoginOptions=3&lrt=&lrtPartition=&hisRegion=&hisScaleUnit=&passwd=%s&ps=2&psRNGCDefaultType=&psRNGCEntropy=&psRNGCSLK=&canary=%s&ctx=%s&hpgrequestid=%s&flowToken=%s&PPSX=&NewUser=1&FoundMSAs=&fspost=0&i21=0&CookieDisclosure=0&IsFidoSupported=1&i2=1&i17=&i18=&i19=15193" % (
        encodeUserName, encodeUserName, quote(UserPass), quote(microsoftMatchJson.get("canary")), microsoftMatchJson.get("sCtx"), responseHeader.get("x-ms-request-id"), microsoftMatchJson.get("sFT"))
    mircroResponse = http_post(mircroUrlPost, mircroRequestHeader, mircroRequestBody, proxies=proxies, cookies=cookies)
    # Re-use the same $Config extraction on the password response page.
    mircroResponseMatcher = configPattern.findall(mircroResponse.text)
    mircroResponseObject = json.loads(mircroResponseMatcher[0])
    hpgRequestId = quote(mircroResponse.headers.get("x-ms-request-id"))
    sCtx = quote(mircroResponseObject.get("sCtx"))
    canary = quote(mircroResponseObject.get("canary"))
    sFT = quote(mircroResponseObject.get("sFT"))
    kmsiRequestCookies = merge_cookies(mircroResponse.cookies, cookies)

    # "Keep me signed in" confirmation; a 302 response carries the
    # auto-submit SAML form in its body.
    kmsiRequestBody = "LoginOptions=1&ctx=%s&hpgrequestid=%s&flowToken=%s&canary=%s&i2=&i17=&i18=&i19=1784" % (sCtx, hpgRequestId, sFT, canary)
    kmsiHeader = dict()
    kmsiHeader.setdefault("Upgrade-Insecure-Requests", "1")
    kmsiHeader.setdefault("Origin", "https://login.microsoftonline.com")
    kmsiHeader.setdefault("Accept", "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2")
    kmsiHeader.setdefault("Referer", authUrl)
    kmsiHeader.setdefault("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")

    kmsiResponse = http_post("https://login.microsoftonline.com/kmsi", kmsiHeader, kmsiRequestBody, proxies=proxies, cookies=kmsiRequestCookies)
    if kmsiResponse.status_code != 302:
        logging.debug("Error in KMSI Response: , errorCode:%s" % kmsiResponse.status_code)
        sys.exit(2)
    # Lift the SAML assertion and relay state out of the returned form.
    sAMLResponseCompile = re.compile("name=\"SAMLResponse\"[\\s]+value=\"([\S]*)\"")
    relayStateCompile = re.compile("name=\"RelayState\"[\\s]+value=\"([\S]*)\"")
    sAMLResponse = sAMLResponseCompile.findall(kmsiResponse.text)[0]
    relayState = relayStateCompile.findall(kmsiResponse.text)[0]
    iamPostResponse = http_post("https://iam.eu-de.otc.t-systems.com/v3-ext/auth/OS-FEDERATION/SSO/SAML2/POST", kmsiHeader, "SAMLResponse=%s&RelayState=%s" % (quote(sAMLResponse), relayState), proxies=proxies)
    if iamPostResponse.status_code != 201:
        # Distinct message for this stage (was copy-pasted "KMSI").
        logging.debug("Error in SAML POST Response: , errorCode:%s" % iamPostResponse.status_code)
        sys.exit(2)
    iamPostLocationResponse = http_get(iamPostResponse.headers.get("Location"), None, proxies=proxies, cookies=iamPostResponse.cookies)
    if iamPostLocationResponse.status_code != 201:
        logging.debug("Error in IAM Location Response: , errorCode:%s" % iamPostLocationResponse.status_code)
        sys.exit(2)
    TokenID = iamPostLocationResponse.headers.get("X-Subject-Token")
    return TokenID
コード例 #23
0
 def set_cookies(self, cookies):
     """Merge *cookies* (a dict or CookieJar) into the session jar.

     Example::

         jar = RequestsCookieJar()
         jar.set("BAIDUID", "B1CCDD4B4BC886BF99364C72C8AE1C01:FG=1",
                 domain="baidu.com")
         client.set_cookies(jar)
     """
     session = self.__session
     session.cookies = merge_cookies(session.cookies, cookies)
コード例 #24
0
    async def resolve_redirects(self,
                                resp,
                                req,
                                stream=False,
                                timeout=None,
                                verify=True,
                                cert=None,
                                proxies=None,
                                yield_requests=False,
                                **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests.

        Follows the redirect chain starting at *resp* (the response to
        *req*).  On each hop the copied PreparedRequest is rebuilt —
        url, method, headers, cookies, auth and proxies — and either
        sent (yielding each Response) or, when *yield_requests* is
        true, yielded unsent.

        :raises TooManyRedirects: once the chain exceeds
            ``self.max_redirects``.
        """
        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        previous_fragment = urlparse(req.url).fragment
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects("Exceeded %s redirects." %
                                       self.max_redirects,
                                       response=resp)

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith("//"):
                parsed_rurl = urlparse(resp.url)
                url = "%s:%s" % (to_native_string(parsed_rurl.scheme), url)

            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
            parsed = urlparse(url)
            if parsed.fragment == "" and previous_fragment:
                parsed = parsed._replace(fragment=previous_fragment)
            elif parsed.fragment:
                previous_fragment = parsed.fragment
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            # May downgrade e.g. POST -> GET depending on the status code.
            self.rebuild_method(prepared_request, resp)

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (
                    codes.temporary_redirect,
                    codes.permanent_redirect,
            ):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ("Content-Length", "Content-Type",
                                  "Transfer-Encoding")
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            # Drop the carried Cookie header; prepare_cookies() below
            # re-renders it from the merged jar.
            headers = prepared_request.headers
            try:
                del headers["Cookie"]
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = prepared_request._body_position is not None and (
                "Content-Length" in headers or "Transfer-Encoding" in headers)

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            # Either hand the rebuilt request to the caller unsent, or
            # send it ourselves with redirects disabled (this loop *is*
            # the redirect handling).
            if yield_requests:
                yield req
            else:

                resp = await self.send(req,
                                       stream=stream,
                                       timeout=timeout,
                                       verify=verify,
                                       cert=cert,
                                       proxies=proxies,
                                       allow_redirects=False,
                                       **adapter_kwargs)

                extract_cookies_to_jar(self.cookies, prepared_request,
                                       resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp
コード例 #25
0
ファイル: client.py プロジェクト: fivestars/treq
    def request(self, method, url, **kwargs):
        """
        See :func:`treq.request()`.

        Builds a Twisted agent request from requests-style keyword
        arguments (``params``, ``headers``, ``data``, ``files``,
        ``json``, ``cookies``, ``auth``, ``allow_redirects``,
        ``browser_like_redirects``, ``timeout``, ``unbuffered``) and
        returns a Deferred that fires with a ``_Response``.
        """
        method = method.encode('ascii').upper()

        # Accept both text and bytes urls.
        if isinstance(url, unicode):
            parsed_url = URL.from_text(url)
        else:
            parsed_url = URL.from_text(url.decode('ascii'))

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.get('params')
        if params:
            parsed_url = parsed_url.replace(
                query=parsed_url.query + tuple(_coerced_query_params(params)))

        url = parsed_url.to_uri().to_text().encode('ascii')

        # Convert headers dictionary to
        # twisted raw headers format.
        headers = kwargs.get('headers')
        if headers:
            if isinstance(headers, dict):
                h = Headers({})
                for k, v in headers.items():
                    if isinstance(v, (bytes, unicode)):
                        h.addRawHeader(k, v)
                    elif isinstance(v, list):
                        h.setRawHeaders(k, v)

                headers = h
        else:
            headers = Headers({})

        # Here we choose a right producer
        # based on the parameters passed in.
        bodyProducer = None
        data = kwargs.get('data')
        files = kwargs.get('files')
        # since json=None needs to be serialized as 'null', we need to
        # explicitly check kwargs for this key
        has_json = 'json' in kwargs

        if files:
            # If the files keyword is present we will issue a
            # multipart/form-data request as it suits better for cases
            # with files and/or large objects.
            files = list(_convert_files(files))
            boundary = str(uuid.uuid4()).encode('ascii')
            headers.setRawHeaders(
                b'content-type',
                [b'multipart/form-data; boundary=' + boundary])
            if data:
                data = _convert_params(data)
            else:
                data = []

            bodyProducer = multipart.MultiPartProducer(data + files,
                                                       boundary=boundary)
        elif data:
            # Otherwise stick to x-www-form-urlencoded format
            # as it's generally faster for smaller requests.
            if isinstance(data, (dict, list, tuple)):
                headers.setRawHeaders(b'content-type',
                                      [b'application/x-www-form-urlencoded'])
                data = urlencode(data, doseq=True)
            bodyProducer = self._data_to_body_producer(data)
        elif has_json:
            # If data is sent as json, set Content-Type as 'application/json'
            headers.setRawHeaders(b'content-type',
                                  [b'application/json; charset=UTF-8'])
            content = kwargs['json']
            json = json_dumps(content, separators=(u',', u':')).encode('utf-8')
            bodyProducer = self._data_to_body_producer(json)

        # Merge per-request cookies into the client's jar; the merged
        # jar is also attached to the final _Response below.
        cookies = kwargs.get('cookies', {})

        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        cookies = merge_cookies(self._cookiejar, cookies)
        wrapped_agent = CookieAgent(self._agent, cookies)

        # Agent layering (inner to outer): cookie handling, optional
        # redirect following, gzip decoding, optional HTTP auth.
        if kwargs.get('allow_redirects', True):
            if kwargs.get('browser_like_redirects', False):
                wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
            else:
                wrapped_agent = RedirectAgent(wrapped_agent)

        wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                            [(b'gzip', GzipDecoder)])

        auth = kwargs.get('auth')
        if auth:
            wrapped_agent = add_auth(wrapped_agent, auth)

        d = wrapped_agent.request(method,
                                  url,
                                  headers=headers,
                                  bodyProducer=bodyProducer)

        # Cancel the request Deferred after `timeout` seconds; undo the
        # delayed cancel if a result arrives first.
        timeout = kwargs.get('timeout')
        if timeout:
            delayedCall = default_reactor(kwargs.get('reactor')).callLater(
                timeout, d.cancel)

            def gotResult(result):
                if delayedCall.active():
                    delayedCall.cancel()
                return result

            d.addBoth(gotResult)

        if not kwargs.get('unbuffered', False):
            d.addCallback(_BufferedResponse)

        return d.addCallback(_Response, cookies)
コード例 #26
0
ファイル: client.py プロジェクト: jsandovalc/aiorequests
    def request(self, method, url, **kwargs):
        """Issue an HTTP request via aiohttp (requests/treq-like API).

        Supported keywords: ``params``, ``headers``, ``data``,
        ``files``, ``cookies``, ``auth`` (a (user, password) tuple),
        ``allow_redirects`` and ``timeout``.

        :returns: a ``_Response`` wrapping the aiohttp response together
            with the merged cookie jar.
        """
        method = method.upper()

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.get('params')
        if params:
            url = _combine_query_params(url, params)

        # Normalize a caller-supplied headers dict into a flat list of
        # (name, value) pairs so multi-valued headers are preserved.
        headers = kwargs.get('headers')
        if headers:
            _headers = []
            for key, val in headers.items():
                if isinstance(val, list):
                    for v in val:
                        _headers.append((key, v))
                else:
                    _headers.append((key, val))
            headers = _headers
        else:
            headers = {}

        def _set_header(name, value):
            # ``headers`` is a dict only when the caller passed none,
            # otherwise a list of (name, value) tuples.  The previous
            # code indexed the list like a dict (``headers[name] = ...``)
            # and raised TypeError whenever headers were supplied.
            if isinstance(headers, dict):
                headers[name] = value
            else:
                headers.append((name, value))

        # Here we choose a right producer
        # based on the parameters passed in.
        data = kwargs.get('data')
        files = kwargs.get('files')
        if files:
            # If the files keyword is present we will issue a
            # multipart/form-data request as it suits better for cases
            # with files and/or large objects.

            # TODO: Must check multipart aiohttp support
            files = list(_convert_files(files))
            boundary = uuid.uuid4()
            # Plain string value (the old list-wrapped value was a
            # twisted Headers leftover that aiohttp cannot use).
            _set_header('Content-Type',
                        'multipart/form-data; boundary=%s' % (boundary,))
            if data:
                data = _convert_params(data)
            else:
                data = []
            data += files
        elif data:
            # Otherwise stick to x-www-form-urlencoded format
            # as it's generally faster for smaller requests.
            if isinstance(data, (dict, list, tuple)):
                _set_header('Content-Type',
                            'application/x-www-form-urlencoded')
                data = urlencode(data, doseq=True)

        cookies = kwargs.get('cookies', {})

        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        cookies = merge_cookies(self._cookiejar, cookies)
        allow_redirects = kwargs.get('allow_redirects', True)

        auth = kwargs.get('auth')
        if auth:
            auth = aiohttp.helpers.BasicAuth(*auth)
        else:
            auth = None

        _set_header('accept-encoding', 'gzip')

        loop = asyncio.get_event_loop()
        timeout = kwargs.get('timeout')

        request_args = {
            'auth': auth,
            # None means "use aiohttp's default (follow redirects)"; an
            # explicit False must survive the pruning loop below.
            'allow_redirects': None if allow_redirects else False,
            'headers': headers,
            'data': data,
            'cookies': cookies if cookies else None
        }

        # Prune only unset (None) arguments.  The old truthiness test
        # (``if not request_args[k]``) also discarded
        # allow_redirects=False, silently re-enabling redirects.
        for k in list(request_args.keys()):
            if request_args[k] is None:
                request_args.pop(k)

        resp = yield from asyncio.wait_for(loop.create_task(aiohttp.request(
            method, url, **request_args)), timeout)

        return _Response(resp, cookies)
コード例 #27
0
    def request(self, method, url, **kwargs):
        """
        See :func:`treq.request()`.

        Accepts requests-style keyword arguments, builds the layered
        Twisted agent chain and returns a Deferred that fires with a
        ``_Response``.  Keywords are consumed with ``pop()`` so any
        leftovers can be reported as unexpected at the end.
        """
        method = method.encode('ascii').upper()
        # Consumed early so the leftover-kwargs warning below can use it.
        stacklevel = kwargs.pop('_stacklevel', 2)

        # Accept DecodedURL, EncodedURL, text and bytes urls alike.
        if isinstance(url, DecodedURL):
            parsed_url = url
        elif isinstance(url, EncodedURL):
            parsed_url = DecodedURL(url)
        elif isinstance(url, six.text_type):
            parsed_url = DecodedURL.from_text(url)
        else:
            parsed_url = DecodedURL.from_text(url.decode('ascii'))

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.pop('params', None)
        if params:
            parsed_url = parsed_url.replace(
                query=parsed_url.query + tuple(_coerced_query_params(params)))

        url = parsed_url.to_uri().to_text().encode('ascii')

        # Convert headers dictionary to
        # twisted raw headers format.
        headers = kwargs.pop('headers', None)
        if headers:
            if isinstance(headers, dict):
                h = Headers({})
                for k, v in headers.items():
                    if isinstance(v, (bytes, six.text_type)):
                        h.addRawHeader(k, v)
                    elif isinstance(v, list):
                        h.setRawHeaders(k, v)

                headers = h
        else:
            headers = Headers({})

        # Body encoding (data/files/json) is delegated to a helper that
        # returns the producer plus the matching content type.
        bodyProducer, contentType = self._request_body(
            data=kwargs.pop('data', None),
            files=kwargs.pop('files', None),
            json=kwargs.pop('json', _NOTHING),
            stacklevel=stacklevel,
        )
        if contentType is not None:
            headers.setRawHeaders(b'Content-Type', [contentType])

        # Merge per-request cookies into the client's jar; the merged
        # jar is also attached to the final _Response below.
        cookies = kwargs.pop('cookies', {})

        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        cookies = merge_cookies(self._cookiejar, cookies)
        wrapped_agent = CookieAgent(self._agent, cookies)

        # Agent layering (inner to outer): cookie handling, optional
        # redirect following, gzip decoding, optional HTTP auth.
        browser_like_redirects = kwargs.pop('browser_like_redirects', False)
        if kwargs.pop('allow_redirects', True):
            if browser_like_redirects:
                wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
            else:
                wrapped_agent = RedirectAgent(wrapped_agent)

        wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                            [(b'gzip', GzipDecoder)])

        auth = kwargs.pop('auth', None)
        if auth:
            wrapped_agent = add_auth(wrapped_agent, auth)

        d = wrapped_agent.request(method,
                                  url,
                                  headers=headers,
                                  bodyProducer=bodyProducer)

        reactor = kwargs.pop('reactor', None)
        if reactor is None:
            from twisted.internet import reactor
        # Cancel the request Deferred after `timeout` seconds; undo the
        # delayed cancel if a result arrives first.
        timeout = kwargs.pop('timeout', None)
        if timeout:
            delayedCall = reactor.callLater(timeout, d.cancel)

            def gotResult(result):
                if delayedCall.active():
                    delayedCall.cancel()
                return result

            d.addBoth(gotResult)

        if not kwargs.pop('unbuffered', False):
            d.addCallback(_BufferedResponse)

        # Anything still left in kwargs was not recognized above.
        if kwargs:
            warnings.warn(
                ("Got unexpected keyword argument: {}."
                 " treq will ignore this argument,"
                 " but will raise TypeError in the next treq release.").format(
                     ", ".join(repr(k) for k in kwargs)),
                DeprecationWarning,
                stacklevel=stacklevel,
            )

        return d.addCallback(_Response, cookies)
コード例 #28
0
ファイル: client.py プロジェクト: jameshilliard/treq
    def request(self, method, url, **kwargs):
        """Issue an HTTP request through the wrapped Twisted agent.

        Accepts requests-style keyword arguments (``params``,
        ``headers``, ``data``, ``files``, ``json``, ``cookies``,
        ``auth``, ``allow_redirects``, ``browser_like_redirects``,
        ``timeout``, ``unbuffered``) and returns a Deferred that fires
        with a ``_Response``.
        """
        method = method.encode('ascii').upper()

        # Join parameters provided in the URL
        # and the ones passed as argument.
        params = kwargs.get('params')
        if params:
            url = _combine_query_params(url, params)

        if isinstance(url, unicode):
            url = URL.fromText(url).asURI().asText().encode('ascii')

        # Convert headers dictionary to
        # twisted raw headers format.
        headers = kwargs.get('headers')
        if headers:
            if isinstance(headers, dict):
                h = Headers({})
                for k, v in headers.items():
                    if isinstance(v, (bytes, unicode)):
                        h.addRawHeader(k, v)
                    elif isinstance(v, list):
                        h.setRawHeaders(k, v)

                headers = h
        else:
            headers = Headers({})

        # Here we choose a right producer
        # based on the parameters passed in.
        bodyProducer = None
        data = kwargs.get('data')
        files = kwargs.get('files')
        # since json=None needs to be serialized as 'null', we need to
        # explicitly check kwargs for this key
        has_json = 'json' in kwargs

        if files:
            # If the files keyword is present we will issue a
            # multipart/form-data request as it suits better for cases
            # with files and/or large objects.
            files = list(_convert_files(files))
            boundary = str(uuid.uuid4()).encode('ascii')
            headers.setRawHeaders(
                b'content-type', [
                    b'multipart/form-data; boundary=' + boundary])
            if data:
                data = _convert_params(data)
            else:
                data = []

            bodyProducer = multipart.MultiPartProducer(
                data + files, boundary=boundary)
        elif data:
            # Otherwise stick to x-www-form-urlencoded format
            # as it's generally faster for smaller requests.
            if isinstance(data, (dict, list, tuple)):
                headers.setRawHeaders(
                    b'content-type', [b'application/x-www-form-urlencoded'])
                data = urlencode(data, doseq=True)
            bodyProducer = self._data_to_body_producer(data)
        elif has_json:
            # If data is sent as json, set Content-Type as 'application/json'
            headers.setRawHeaders(
                b'content-type', [b'application/json; charset=UTF-8'])
            content = kwargs['json']
            json = json_dumps(content, separators=(u',', u':')).encode('utf-8')
            bodyProducer = self._data_to_body_producer(json)

        # Merge per-request cookies into the client's jar; the merged
        # jar is also attached to the final _Response below.
        cookies = kwargs.get('cookies', {})

        if not isinstance(cookies, CookieJar):
            cookies = cookiejar_from_dict(cookies)

        cookies = merge_cookies(self._cookiejar, cookies)
        wrapped_agent = CookieAgent(self._agent, cookies)

        # Agent layering (inner to outer): cookie handling, optional
        # redirect following, gzip decoding, optional HTTP auth.
        if kwargs.get('allow_redirects', True):
            if kwargs.get('browser_like_redirects', False):
                wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
            else:
                wrapped_agent = RedirectAgent(wrapped_agent)

        wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                            [(b'gzip', GzipDecoder)])

        auth = kwargs.get('auth')
        if auth:
            wrapped_agent = add_auth(wrapped_agent, auth)

        d = wrapped_agent.request(
            method, url, headers=headers,
            bodyProducer=bodyProducer)

        # Cancel the request Deferred after `timeout` seconds; undo the
        # delayed cancel if a result arrives first.
        timeout = kwargs.get('timeout')
        if timeout:
            delayedCall = default_reactor(kwargs.get('reactor')).callLater(
                timeout, d.cancel)

            def gotResult(result):
                if delayedCall.active():
                    delayedCall.cancel()
                return result

            d.addBoth(gotResult)

        if not kwargs.get('unbuffered', False):
            d.addCallback(_BufferedResponse)

        return d.addCallback(_Response, cookies)