def send(self, request: PreparedRequest, stream=False, timeout=None, verify=True, cert=None, proxies=None):
    """Handle a prepared request through the fake Icinga and build a Response.

    Emulates requests.adapters.HTTPAdapter.send() closely enough for tests.
    """
    # Normalise the URL to text before anything else touches it.
    if isinstance(request.url, bytes):
        request.url = request.url.decode("utf-8")
    # Validate the path, then let the fake Icinga produce a reply dict.
    self.check_path(request.url)
    resp = self.icinga.handle(request)
    # Assemble a requests.Response the way HTTPAdapter would.
    response = Response()
    response.status_code = resp.get("status_code", None)
    response.headers = CaseInsensitiveDict(resp.get("headers", {}))
    response.encoding = get_encoding_from_headers(response.headers)
    response.reason = resp.get("reason", None)
    response.url = request.url  # already text (decoded above)
    response.request = request
    response.raw = StreamableBytesIO(resp.get("body", "").encode("utf-8"))
    # Cookie jar is not mocked, as Icinga doesn't use cookies.
    # response.connection is not mocked (not a default Response attribute).
    # Expose the transport settings so tests can inspect them.
    self.settings_hook(stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    return response
def response(status_code=200, content='', headers=None, reason=None, elapsed=0, request=None, stream=False, http_vsn=11):
    """Build a canned requests.Response for tests.

    :param status_code: HTTP status to report.
    :param content: body as str/bytes, or a dict/list serialised to JSON.
    :param headers: optional header mapping.
    :param reason: HTTP reason phrase.
    :param elapsed: elapsed time; NOTE(review): passed as timedelta(days=...)
        — seconds may have been intended; confirm with callers.
    :param request: originating request (url/cookie extraction source).
    :param stream: when true, also expose the content through ``raw``.
    :param http_vsn: HTTP version tag stored on ``raw``.
    """
    res = requests.Response()
    res.status_code = status_code
    # Normalise the body to bytes.
    if isinstance(content, (dict, list)):
        content = json.dumps(content).encode('utf-8')
    if isinstance(content, text_type):
        content = content.encode('utf-8')
    res._content = content
    # FIX: _content_consumed is a boolean flag in requests; the original
    # assigned the content object itself, which only worked by accident
    # (and was falsy for an empty body).
    res._content_consumed = True
    res.headers = structures.CaseInsensitiveDict(headers or {})
    res.encoding = utils.get_encoding_from_headers(res.headers)
    res.reason = reason
    res.elapsed = datetime.timedelta(elapsed)
    res.request = request
    if hasattr(request, 'url'):
        res.url = request.url
        if isinstance(request.url, bytes):
            res.url = request.url.decode('utf-8')
    if 'set-cookie' in res.headers:
        res.cookies.extract_cookies(cookies.MockResponse(Headers(res)), cookies.MockRequest(request))
    # raw carries the content only when streaming was requested.
    if stream:
        res.raw = BytesIO(content)
    else:
        res.raw = BytesIO(b'')
    res.raw.version = http_vsn
    # normally this closes the underlying connection,
    # but we have nothing to free.
    res.close = lambda *args, **kwargs: None
    return res
def guess_response_encoding(resp):
    """Guess the content encoding of a requests response.

    Tries charsets declared inside the content first, then the response
    headers, and finally chardet detection.

    Note: there's a performance issue due to chardet.
    """
    # First try encodings declared inside the response content itself.
    encs = get_encodings_from_content(resp.content) or []
    for enc in encs:
        try:
            resp.content.decode(enc)
            LOG.info('Detected encoding %s from response content.', enc)
            return enc
        except UnicodeDecodeError:
            LOG.debug('Encoding from response content doesn\'t work.')
    # Then the charset from the Content-Type header.
    enc = get_encoding_from_headers(resp.headers)
    if enc:
        try:
            resp.content.decode(enc)
            LOG.info('Detected encoding %s from response header.', enc)
            return enc
        except UnicodeDecodeError:
            LOG.debug('Encoding from response header doesn\'t work.')
    # Neither encoding works, we have to go the hard way (slow chardet scan).
    start = clock()
    g = detect(resp.content)
    # FIX: corrected the "cofidence" typo in the log message, and switched to
    # lazy %-style logger arguments instead of eager string formatting.
    LOG.info('Detected encoding %s with confidence of %g in %gs.',
             g['encoding'], g['confidence'], clock() - start)
    # NOTE(review): if `clock` is time.clock it was removed in Python 3.8 —
    # consider time.perf_counter; confirm the import.
    return g['encoding']
def build_response(self, req, resp):
    """Translate an adapter-level response object into a requests.Response."""
    response = requests.Response()
    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = resp.status_code
    # Headers become case-insensitive; tolerate objects without .headers.
    response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    # NOTE(review): reason is taken from raw.status — usually `.reason` holds
    # the phrase; confirm against the underlying response type.
    response.reason = response.raw.status
    url = req.url
    response.url = url.decode("utf-8") if isinstance(url, bytes) else url
    # Give the Response some context.
    response.request = req
    response.connection = self
    response._content = resp.data
    return response
def response(self, status_code=200, content='', headers=None, reason=None, elapsed=0, request=None, stream=False):
    """Build a canned requests.Response for tests.

    :param status_code: HTTP status to report.
    :param content: body as str/bytes, or a dict/list serialised to JSON.
    :param headers: optional header mapping.
    :param reason: HTTP reason phrase.
    :param elapsed: NOTE(review): accepted but never used — confirm whether
        ``res.elapsed`` should be populated.
    :param request: originating request (used for the url).
    :param stream: when true, also expose the content through ``raw``.
    """
    res = requests.Response()
    res.status_code = status_code
    # Normalise the body to bytes.
    if isinstance(content, (dict, list)):
        content = json.dumps(content).encode('utf-8')
    if isinstance(content, str):
        content = content.encode('utf-8')
    res._content = content
    # FIX: _content_consumed is a boolean flag in requests; the original
    # stored the content object itself (falsy for an empty body).
    res._content_consumed = True
    res.headers = structures.CaseInsensitiveDict(headers or {})
    res.encoding = utils.get_encoding_from_headers(res.headers)
    res.reason = reason
    res.request = request
    if hasattr(request, 'url'):
        res.url = request.url
        if isinstance(request.url, bytes):
            res.url = request.url.decode('utf-8')
    if stream:
        res.raw = BytesIO(content)
    else:
        res.raw = BytesIO(b'')
    # normally this closes the underlying connection,
    # but we have nothing to free.
    res.close = lambda *args, **kwargs: None
    return res
def build_response(self, request, resp):
    """Build a requests.Response from an HTTP20Response.

    Mirrors the stock adapter logic, adapted to the missing ``.headers``
    property on HTTP20Response (headers come from ``getheaders()``).
    """
    rsp = Response()
    rsp.status_code = resp.status
    rsp.reason = resp.reason
    rsp.headers = CaseInsensitiveDict(resp.getheaders())
    rsp.encoding = get_encoding_from_headers(rsp.headers)
    rsp.raw = resp
    rsp.url = request.url
    rsp.request = request
    rsp.connection = self
    extract_cookies_to_jar(rsp.cookies, request, rsp)
    # requests expects raw responses to expose release_conn; patch a no-op in.
    resp.release_conn = lambda: None
    return rsp
def get_unicode_from_request(r):
    """Returns the requested content back in unicode.

    :param r: Request object to get unicode content from.

    Try:

    1. charset from content-type
    2. fall back and assume utf-8
    3. latin-1 replacing unicode chars
    """
    if isinstance(r.body, unicode):
        return r.body
    tried_encodings = []
    # Try charset from content-type
    # i.e. "Content-type: text/plain; charset=us-ascii"
    encoding = get_encoding_from_headers(CaseInsensitiveDict(r.headers))
    if encoding:
        try:
            return unicode(r.body, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)
    # If no declared charset worked, assume utf-8 ...
    try:
        return unicode(r.body, 'utf-8')
    # FIX: narrowed the bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit) to the decode failure actually expected.
    except UnicodeError:
        tried_encodings.append('utf-8')
    # ... then latin-1, replacing undecodable bytes.
    return unicode(r.body, 'latin-1', errors='replace')
def _receive_response(self, task, response):
    """Delegate callback: turn a native response into a requests.Response and
    hand it to the waiting queue.

    Expected only on background threads, so it must stay Python-thread-safe:
    reading self._tasks and posting to the queue is fine, but no other method
    calls on this object unless they are explicitly background-safe.
    """
    queue, request = self._tasks[task]
    resp = Response()
    resp.status_code = getKey(response, 'statusCode')
    resp.reason = ''  # TODO: Why do I have to do this?
    resp.headers = CaseInsensitiveDict(getKey(response, 'allHeaderFields'))
    resp.encoding = get_encoding_from_headers(resp.headers)
    # TODO: raw should eventually wrap an object providing the various raw
    # things that requests needs.
    resp.raw = None
    url = request.url
    resp.url = url.decode('utf-8') if isinstance(url, bytes) else url
    resp.request = request
    resp.connection = self
    # Put this response on the queue.
    queue.put_nowait(resp)
def build_response(self, request, resp):
    """Build a requests.Response from an HTTP20Response.

    Same job as the stock adapter method, but HTTP20Response lacks a
    ``.headers`` property, so headers come from ``getheaders()``.
    """
    out = Response()
    out.status_code = resp.status
    out.reason = resp.reason
    out.headers = CaseInsensitiveDict(resp.getheaders())
    out.encoding = get_encoding_from_headers(out.headers)
    out.raw = resp
    extract_cookies_to_jar(out.cookies, request, out)
    url = request.url
    out.url = url.decode('utf-8') if isinstance(url, bytes) else url
    out.request = request
    out.connection = self
    # requests expects raw responses to have release_conn; patch in a no-op.
    resp.release_conn = lambda: None
    return out
def encoding(self):
    """Best-effort character encoding for self.content (cached on first use)."""
    if hasattr(self, "_encoding"):
        return self._encoding
    if isinstance(self.content, unicode):
        # Already decoded — nothing to guess.
        return "unicode"
    # 1) charset from the Content-Type header (requests' implicit
    #    ISO-8859-1 default counts as "unknown").
    enc = get_encoding_from_headers(self.headers)
    if enc == "ISO-8859-1":
        enc = None
    # 2) charset declared inside the content itself.
    if not enc:
        candidates = get_encodings_from_content(self.content)
        enc = candidates[0] if candidates else None
    # 3) chardet auto-detection, when available.
    if not enc and chardet is not None:
        enc = chardet.detect(self.content)["encoding"]
    # gb2312 is a subset of gb18030; widen it.
    if enc and enc.lower() == "gb2312":
        enc = "gb18030"
    self._encoding = enc or "utf-8"
    return self._encoding
def to_requests_response(self):
    """Convert this response into a `requests.Response`.

    Returns:
        requests.Response: the generated response.
    """
    # Rewind so the raw stream starts at the beginning of the body.
    self.body.seek(0)
    raw = URLLib3Rresponse(
        body=self.body,
        headers=self.headers,
        status=self.http_code,
        request_method=self.request.method,
        reason=self.reason,
        preload_content=False
    )
    rsp = RequestResponse()
    rsp.request = self.request
    rsp.raw = raw
    rsp.status_code = self.http_code
    rsp.reason = self.reason
    rsp.headers = CaseInsensitiveDict(raw.headers)
    rsp.encoding = get_encoding_from_headers(rsp.headers)
    extract_cookies_to_jar(rsp.cookies, self.request, raw)
    url = self.request.url
    rsp.url = url.decode("utf-8") if isinstance(url, six.binary_type) else url
    return rsp
def encoding(rsp):
    """Guess the encoding of rsp.content.

    Falls back from the Content-Type header, to charset declarations inside
    the content, to chardet (when available), and finally to utf-8.
    """
    if isinstance(rsp.content, six.text_type):
        return 'unicode'  # already decoded text
    enc = get_encoding_from_headers(rsp.headers)
    if enc == 'ISO-8859-1':
        # requests' implicit default — treat as "unknown".
        enc = None
    if not enc and get_encodings_from_content:
        found = get_encodings_from_content(rsp.content)
        enc = found[0] if found else None
    if not enc and chardet is not None:
        enc = chardet.detect(rsp.content)['encoding']
    if enc and enc.lower() == 'gb2312':
        enc = 'gb18030'  # gb18030 supersets gb2312
    return enc or 'utf-8'
def find_encoding(content, headers=None):
    """Guess the text encoding of `content`; default to latin_1."""
    if isinstance(content, unicode):
        # Already decoded — nothing to detect.
        return 'unicode'
    enc = None
    # Charset from the Content-Type header, when headers were supplied
    # (requests' implicit ISO-8859-1 default counts as unknown).
    if headers:
        enc = get_encoding_from_headers(headers)
        if enc == 'ISO-8859-1':
            enc = None
    # Charset declared within the document itself.
    if not enc:
        found = get_encodings_from_content(content)
        enc = found[0] if found else None
    # chardet auto-detection as a last resort.
    if not enc and chardet is not None:
        enc = chardet.detect(content)['encoding']
    if enc and enc.lower() == 'gb2312':
        enc = 'gb18030'
    return enc or 'latin_1'
def build_response(request, status_code=200, headers=None, content='(none)'):
    """Build a :class:`requests.Response` object on the basis of the passed
    parameters.

    :param request: the request this response answers (supplies the url).
    :param status_code: HTTP status code; the reason phrase is derived from it.
    :param headers: optional header mapping (default: no headers).
    :param content: pre-read body stored directly on the response.
    """
    response = Response()
    response.status_code = status_code
    response.reason = responses[status_code]
    # FIX: `headers={}` was a mutable default argument; use None and build a
    # fresh dict per call (callers passing a mapping are unaffected).
    response.headers = CaseInsensitiveDict(headers or {})
    # Pretend that we've already read from the socket.
    response._content = content
    response.encoding = get_encoding_from_headers(response.headers)
    response.url = request.url
    response.raw = MockRawResponse()
    # Give the Response some context.
    response.request = request
    response.connection = MockConnection()
    return response
def find_encoding(content, headers=None):
    """Guess the encoding of `content`.

    str input is assumed utf-8; bytes fall back through the Content-Type
    header, chardet, charsets declared inside the content, and finally
    latin_1.
    """
    # Already-decoded text needs no detection.
    if isinstance(content, str):
        return 'utf-8'
    encoding = None
    # Try charset from content-type (requests' implicit ISO-8859-1 default
    # counts as unknown).
    if headers:
        encoding = get_encoding_from_headers(headers)
        if encoding == 'ISO-8859-1':
            encoding = None
    # Fallback to auto-detected encoding.
    if not encoding and chardet is not None:
        encoding = chardet.detect(content)['encoding']
    # Try charset declared inside the content.
    if not encoding:
        try:
            encoding = get_encodings_from_content(content)
            encoding = encoding and encoding[0] or None
        # FIX: narrowed the bare `except:` — it also caught
        # KeyboardInterrupt/SystemExit.
        except Exception:
            if isinstance(content, bytes):
                return encoding or 'utf-8'
    if encoding and encoding.lower() == 'gb2312':
        encoding = 'gb18030'
    return encoding or 'latin_1'
def send(self, request, **kwargs):
    """Send `request` over an M2Crypto HTTPS connection and build a Response."""
    url = urlparse(request.url)
    if url.scheme != 'https':
        raise Exception('Only HTTPS is supported!')
    # Open an SSL connection through our custom context.
    ctx = self._make_context()
    conn = httpslib.HTTPSConnection(url.hostname, url.port or 443, ssl_context=ctx)
    conn.request(request.method, url.path, request.body, request.headers)
    resp = conn.getresponse()
    out = Response()
    # Fallback to None if there's no status_code, for whatever reason.
    out.status_code = getattr(resp, 'status', None)
    # Make headers case-insensitive.
    out.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    # Set encoding.
    out.encoding = get_encoding_from_headers(out.headers)
    out.raw = resp
    out.reason = out.raw.reason
    raw_url = request.url
    out.url = raw_url.decode('utf-8') if isinstance(raw_url, bytes) else raw_url
    # Give the Response some context.
    out.request = request
    out.connection = self
    return out
def start_response(status, headers):
    """WSGI start_response shim: copy status/headers onto the captured response."""
    # Status line looks like "200 OK"; the leading token is the code.
    code, _, _ = status.partition(' ')
    response.status_code = int(code)
    response.reason = responses.get(response.status_code, 'Unknown Status Code')
    response.headers = CaseInsensitiveDict(headers)
    response.encoding = get_encoding_from_headers(response.headers)
    response.elapsed = datetime.datetime.utcnow() - start
    self._log(response)
def build_response(self, req, resp):
    """Build a :class:`Response <requests.Response>` from a urllib3 response.

    Not intended for user code; exposed only for subclasses of
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param req: the :class:`PreparedRequest <PreparedRequest>` that produced
        this response.
    :param resp: the urllib3 response object.
    """
    out = Response()
    # Fallback to None if there's no status_code, for whatever reason.
    out.status_code = getattr(resp, 'status', None)
    # Headers become case-insensitive; tolerate objects without .headers.
    out.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    out.encoding = get_encoding_from_headers(out.headers)
    out.raw = resp
    out.reason = out.raw.reason
    url = req.url
    out.url = url.decode('utf-8') if isinstance(url, bytes) else url
    # Add new cookies from the server.
    extract_cookies_to_jar(out.cookies, req, resp)
    # Give the Response some context.
    out.request = req
    out.connection = self
    return out
def currency_rates(code):
    """Look up `code` in the CBR daily XML feed.

    Returns a tuple ``(data_list, rate, val_curs_date)`` where data_list holds
    the four tag values of the matching <Valute> element (with the nominal
    coerced to int and the value to a rounded Decimal), rate is value/nominal,
    and val_curs_date is the feed's quote date. When the code is absent,
    data_list carries a Russian "currency not found" marker and rate is None.
    """
    # Fetch the daily XML rates feed from the Central Bank of Russia.
    resp = get('http://www.cbr.ru/scripts/XML_daily.asp')
    encode = utils.get_encoding_from_headers(resp.headers)
    valute_string = resp.content.decode(encoding=encode)
    resp.close()
    # Quote date: the 10 characters after 'Date="' (dd.mm.yyyy).
    val_curs_date = valute_string[valute_string.find('Date="') + 6:valute_string.find('Date="') + 16].split('.')
    val_curs_date = date(year=int(val_curs_date[2]), month=int(val_curs_date[1]), day=int(val_curs_date[0]))
    find_charcode = valute_string.find(f'<CharCode>{code}')
    if find_charcode == -1:
        # Currency code not present in the feed.
        return ['ВАЛЮТА НЕ НАЙДЕНА', None, code, None], None, val_curs_date
    # Narrow the working string to this currency's <Valute> element.
    find_ending = find_charcode + valute_string[find_charcode:].find(
        '</Valute>')
    valute_string = valute_string[find_charcode:find_ending]
    data_list = []
    # Pull the four tag values in feed order (presumably CharCode, Nominal,
    # Name, Value — TODO confirm against the feed layout).
    for num in range(4):
        data_start = valute_string.find('>') + 1
        data_end = valute_string.find('</')
        data_list.append(valute_string[data_start:data_end])
        # +12 skips past the closing tag into the next element — assumes a
        # fixed tag-name length; verify against the XML layout.
        valute_string = valute_string[data_end + 12:]
    data_list[1] = int(data_list[1])  # nominal
    # Rate per single unit: value / nominal (comma decimal separator).
    rate = Decimal(data_list[3].replace(',', '.')) / data_list[1]
    data_list[3] = round(Decimal(data_list[3].replace(',', '.')), 2)
    return data_list, rate, val_curs_date
def build_response_into(self, response, req, urllib3_resp):
    """Like requests.adapters.HTTPAdapter.build_response, but fills in an
    existing requests.Response instead of creating a new one.
    """
    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(urllib3_resp, 'status', None)
    # Headers become case-insensitive; tolerate objects without .headers.
    response.headers = CaseInsensitiveDict(getattr(urllib3_resp, 'headers', {}))
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = urllib3_resp
    response.reason = response.raw.reason
    target = req.url
    response.url = target.decode('utf-8') if isinstance(target, bytes) else target
    # Add new cookies from the server.
    extract_cookies_to_jar(response.cookies, req, urllib3_resp)
    # Give the Response some context.
    response.request = req
    response.connection = self
def currency_rates(argv):
    """Return ``(rate, date)`` for a currency from the CBR daily XML feed, or
    ``None`` when the currency code is not present.

    :param argv: either a sys.argv-style list ``[program, code]`` or a bare
        currency-code string (case-insensitive).
    """
    # FIX: use isinstance() rather than `type(argv) == list` so list
    # subclasses are accepted as well.
    if isinstance(argv, list):
        programm, valute = argv
    else:
        valute = argv
    response = get('https://www.cbr.ru/scripts/XML_daily.asp')
    enc = utils.get_encoding_from_headers(response.headers)
    content = response.content.decode(enc)
    # Get date: 10 characters (dd.mm.yyyy) after 'Date="'.
    date_find = 'Date="'
    date_pos = content.find(date_find)
    date_len = len('2020.20.20')
    date_str = content[date_pos + len(date_find):date_pos + len(date_find) + date_len]
    date_arr = date_str.split('.')
    date_obj = date(int(date_arr[2]), int(date_arr[1]), int(date_arr[0]))
    # Get value valute: first <Value> tag after the currency code.
    valute_find = valute.upper()
    tag_start = '<Value>'
    tag_stop = '</Value>'
    valute_pos = content.find(valute_find)
    if valute_pos < 0:
        return None
    value_start = content[valute_pos:].find(tag_start) + len(tag_start)
    value_stop = content[valute_pos:].find(tag_stop)
    valute_value = float(content[valute_pos + value_start:valute_pos + value_stop].replace(',', '.'))
    return valute_value, date_obj
def get_currency_rate(currency_code):
    """Print the CBR exchange rate for `currency_code` and the quote date, or
    'None' when the code is missing, then exit(0).

    :param currency_code: ISO currency code (case-insensitive).
    """
    response = requests.get('http://www.cbr.ru/scripts/XML_daily.asp')
    encodings = utils.get_encoding_from_headers(response.headers)
    content = response.content.decode(encoding=encodings)
    # Quote date: 10 characters (dd.mm.yyyy) after 'Date="'.
    # NOTE(review): bank_date stays unbound if the feed lacks 'Date="' —
    # confirm the feed always carries it.
    if 'Date="' in content:
        cutting_edge = content.index('Date="')
        temp_date = content[cutting_edge + 6:cutting_edge + 16].split('.')
        bank_date = datetime(year=int(temp_date[2]), month=int(temp_date[1]),
                             day=int(temp_date[0]))
    currency_code = str.upper(currency_code)
    if currency_code in content:
        cutting_edge = content.index(currency_code)
        content = content[cutting_edge:]
        if '<Value>' in content:
            cutting_edge = content.index('<Value>')
            content = content[cutting_edge + 7:cutting_edge + 17].split('<')
            # FIX: build the Decimal from the string directly —
            # Decimal(float(...)) round-trips through binary float and loses
            # the exactness Decimal exists for.
            currency = Decimal(content[0].replace(',', '.'))
            print(
                f"{currency_code} {currency:6.4} {bank_date.strftime('%Y-%m-%d')}")
    else:
        print('None')
    exit(0)
def encoding(self):
    """Return (and cache) the best-guess character encoding of self.content."""
    cached = getattr(self, '_encoding', None)
    if hasattr(self, '_encoding'):
        return cached
    if isinstance(self.content, unicode):
        # Nothing to guess for already-decoded text.
        return 'unicode'
    # Header charset first; requests' implicit ISO-8859-1 default means
    # "nothing declared", so treat it as unknown.
    guess = get_encoding_from_headers(self.headers)
    if guess == 'ISO-8859-1':
        guess = None
    # Next: charset declarations embedded in the content.
    if not guess:
        declared = get_encodings_from_content(self.content)
        guess = declared[0] if declared else None
    # Finally: statistical detection via chardet, when available.
    if not guess and chardet is not None:
        guess = chardet.detect(self.content)['encoding']
    # gb18030 is a superset of gb2312 — prefer the wider codec.
    if guess and guess.lower() == 'gb2312':
        guess = 'gb18030'
    self._encoding = guess or 'utf-8'
    return self._encoding
def http_response_to_response(self, http_response, prepared_request):
    """Transform a django WSGI response into a requests Response model.

    :param django.http.response.HttpResponse http_response: the http response
        sent by the django view
    :return: the requests Response model corresponding to http_response
    :rtype: Response
    """
    rsp = Response()
    # Fallback to None if there's no status_code, for whatever reason.
    rsp.status_code = getattr(http_response, 'status_code', None)
    # Make headers case-insensitive (django keeps them on _headers).
    rsp.headers = CaseInsensitiveDict(getattr(http_response._headers, 'headers', {}))
    # Set encoding.
    rsp.encoding = get_encoding_from_headers(rsp.headers)
    rsp.raw = http_response
    rsp.reason = rsp.raw.reason_phrase
    rsp._content = http_response.content
    req = prepared_request
    if isinstance(req.url, bytes):  # pragma: no cover
        rsp.url = req.url.decode('utf-8')
    else:
        rsp.url = req.url
    # Add new cookies from the server.
    extract_cookies_to_jar(rsp.cookies, req, rsp)
    # Give the Response some context.
    rsp.request = req
    rsp.connection = self
    return rsp
def request(method, url, **kwargs):
    """Route a requests-style call through the webtest apps and convert the
    webtest response back into a requests.Response.
    """
    # webtest uses `params` where requests uses `data`.
    if 'data' in kwargs:
        kwargs['params'] = kwargs.pop('data')
    elif 'params' in kwargs and kwargs['params'] is None:
        kwargs.pop('params')
    auth = kwargs.pop('auth', None)
    # Drop requests-only options webtest doesn't understand.
    for key in ('allow_redirects', 'stream'):
        kwargs.pop(key, None)
    if app.app.registry.api_url in url:
        # API call: optionally swap in Basic auth for the duration.
        if auth:
            authorization = api.authorization
            api.authorization = ('Basic', auth)
        resp = api._gen_request(method.upper(), url, expect_errors=True, **kwargs)
        if auth:
            api.authorization = authorization
    else:
        resp = app._gen_request(method.upper(), url, expect_errors=True, **kwargs)
    out = Response()
    out.status_code = resp.status_int
    out.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    out.encoding = get_encoding_from_headers(out.headers)
    out.raw = resp
    out._content = resp.body
    out.reason = resp.status
    out.url = url.decode('utf-8') if isinstance(url, bytes) else url
    out.request = resp.request
    return out
def create_response(request, **kwargs):
    """Create a requests.Response to return upon a successful match.

    :param int status_code: The status code to return upon a successful
        match. Defaults to 200.
    :param HTTPResponse raw: A HTTPResponse object to return upon a
        successful match.
    :param io.IOBase body: An IO object with a read() method that can
        return a body on successful match.
    :param bytes content: A byte string to return upon a successful match.
    :param unicode text: A text string to return upon a successful match.
    :param object json: A python object to be converted to a JSON string
        and returned upon a successful match.
    :param dict headers: A dictionary object containing headers that are
        returned upon a successful match.
    :param CookieJar cookies: A cookie jar with cookies to set on the
        response.
    """
    connection = kwargs.pop('connection', _FakeConnection())
    # Validate mutual exclusivity of body arguments before consuming them.
    _check_body_arguments(**kwargs)
    raw = kwargs.pop('raw', None)
    body = kwargs.pop('body', None)
    content = kwargs.pop('content', None)
    text = kwargs.pop('text', None)
    json = kwargs.pop('json', None)
    headers = kwargs.pop('headers', {})
    encoding = None
    if content is not None and not isinstance(content, six.binary_type):
        raise TypeError('Content should be binary data')
    if text is not None and not isinstance(text, six.string_types):
        raise TypeError('Text should be string data')
    # Normalise the body step by step: json -> text -> content -> body.
    if json is not None:
        text = jsonutils.dumps(json)
    if text is not None:
        # Encode text with the header-declared charset, defaulting to utf-8.
        encoding = get_encoding_from_headers(headers) or 'utf-8'
        content = text.encode(encoding)
    if content is not None:
        body = _IOReader(content)
    # Build a urllib3 HTTPResponse unless the caller supplied one.
    if not raw:
        raw = HTTPResponse(status=kwargs.get('status_code', _DEFAULT_STATUS),
                           headers=headers,
                           reason=kwargs.get('reason'),
                           body=body or _IOReader(six.b('')),
                           decode_content=False,
                           preload_content=False,
                           original_response=compat._fake_http_response)
    # Let the stock adapter do the final Response assembly.
    response = _http_adapter.build_response(request, raw)
    response.connection = connection
    # Preserve the encoding we used for text bodies if the adapter found none.
    if encoding and not response.encoding:
        response.encoding = encoding
    _extract_cookies(request, response, kwargs.get('cookies'))
    return response
class PassThroughProxyDownloader(DownloaderBase):
    """Downloader that speaks raw HTTP/1.1 through a pass-through proxy
    socket and converts the reply into a requests.Response. (Python 2 code.)
    """

    def __init__(self, user_agent, proxy):
        # `proxy` is an "ip:port" string.
        self.user_agent = user_agent
        web_proxy_split = proxy.split(':')
        self.proxy_ip = web_proxy_split[0]
        self.proxy_port = int(web_proxy_split[1])

    def download(self, url, referrer=None, if_modified_since=None, if_none_match=None):
        """GET `url` via the proxy; returns a requests.Response (request/
        cookies left unset)."""
        # Build the raw request lines by hand.
        # NOTE(review): "Accept: */*" has no trailing comma, so Python
        # string-concatenates it with the Host header into one malformed
        # line — almost certainly a bug; confirm and add the comma.
        request = [
            "GET %s HTTP/1.1" % url,
            "User-Agent: %s" % self.user_agent,
            "Accept-Encoding: gzip, deflate, compress",
            "Accept: */*"
            "Host: %s" % urlparse(url).hostname,
            "Connection: Close"  # this may not be necessary
        ]
        for header, value in CommonHeaders.iteritems():
            request.append("%s: %s" % (header, value))
        for header, value in DownloaderBase.construct_headers(referrer, if_modified_since, if_none_match).iteritems():
            request.append("%s: %s" % (header, value))
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect((self.proxy_ip, self.proxy_port))
        except socket.error, ex:  # Python 2 except syntax
            raise ProxyDownException(ex)
        # Blank line terminates the request head.
        s.send("\r\n".join(request) + '\r\n\r\n')
        # Parse the reply off the socket, then wrap it as a urllib3 response.
        r = HTTPResponse(s, strict=False, method='GET', buffering=True)
        r.begin()
        resp = urllib3Response.from_httplib(r, decode_content=False)
        s.close()
        r.close()
        response = requests.Response()
        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
        response.encoding = get_encoding_from_headers(response.headers)
        response._content = resp.data
        response.raw = resp
        response.reason = response.raw.reason
        response.url = url
        # don't care about cookies right now
        #extract_cookies_to_jar(response.cookies, req, resp)
        # don't worry about the requests' Request object right now until it is needed
        #response.request = req
        response.request = None
        return response
def __init__(self, response, method):
    """Wrap a twisted response, flattening its multi-valued raw headers."""
    self._response = response
    self._method = method
    self.status_code = response.code
    # Join repeated header values with ", " as HTTP permits.
    flattened = ((name, ', '.join(values))
                 for name, values in response.headers.getAllRawHeaders())
    self.headers = CaseInsensitiveDict(flattened)
    # HTTP's historical default charset when the headers declare none.
    self.encoding = get_encoding_from_headers(self.headers) or 'ISO-8859-1'
    self._waiting_for_content = []
def send(self, request, **kwargs):
    """Return a canned 200 HTML response regardless of the request."""
    canned = Response()
    canned.status_code = 200
    canned.reason = 'OK'
    canned.headers = {
        'Content-Type': 'text/html; charset=UTF-8',
    }
    canned.encoding = get_encoding_from_headers(canned.headers)
    canned.raw = BytesIO(b'<!doctype html><html>Hello</html>')
    return canned
def start_response(status, headers, exc_info=None):
    """WSGI start_response shim writing onto the captured requests.Response."""
    headers = make_headers(headers)
    # Status line looks like "200 OK"; the leading token is the code.
    code = int(status.split(" ")[0])
    response.status_code = code
    response.reason = responses.get(code, "Unknown Status Code")
    response.headers = headers
    # Let the cookie machinery see the headers on the fake original response.
    resp._original_response.msg = headers
    extract_cookies_to_jar(response.cookies, request, resp)
    response.encoding = get_encoding_from_headers(response.headers)
    response.elapsed = datetime.datetime.utcnow() - start
    self._log(response)
def build_response(req, delegate, cookiestore):
    # type: (PreparedRequest, NSURLSessionAdapterDelegate, NSHTTPCookieStorage) -> Response
    """Build a requests `Response` object from the response data collected by the
    NSURLSessionDelegate, and the default cookie store."""
    response = Response()
    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(delegate, 'status', None)
    # Make headers case-insensitive.
    response.headers = CaseInsensitiveDict(getattr(delegate, 'headers', {}))
    # Set encoding.
    response.encoding = get_encoding_from_headers(response.headers)
    # raw/reason deliberately left unset — there is no urllib3 response here.
    # response.raw = resp
    # response.reason = response.raw.reason
    if isinstance(req.url, bytes):
        response.url = req.url.decode('utf-8')
    else:
        response.url = req.url
    # Add new cookies from the server. For NSURLSession these have already
    # been parsed. A hand-rolled translation of NSHTTPCookieStorage cookies
    # into a RequestsCookieJar was started here and abandoned — cookies are
    # currently NOT copied onto the response.
    # The whole body collected by the delegate, wrapped as a file-like
    # object (Python 2 `buffer`).
    response.raw = io.BytesIO(buffer(delegate.output))
    # Give the Response some context.
    response.request = req
    # response.connection = self
    return response
def currency_rates(val_str):
    """Return the CBR daily rate for currency code `val_str` (case-insensitive),
    or None when the code is not in the feed."""
    code = val_str.upper()
    reply = get('http://www.cbr.ru/scripts/XML_daily.asp')
    enc = utils.get_encoding_from_headers(reply.headers)
    content = reply.content.decode(encoding=enc)
    pos = content.find(code)
    if pos > -1:
        # First <Value> tag after the currency code; comma decimal separator.
        tail = content[pos:]
        begin = tail.find('<Value>') + 7
        end = tail.find('</Value>')
        return float(tail[begin:end].replace(',', '.'))
    return None
def currency_rates(currency):
    """Print the CBR exchange rate for `currency` (case-insensitive), or None
    when the code is absent from the feed.

    Relies on a surrounding-scope `response` holding the XML_daily.asp reply.
    """
    code = currency.upper()
    encodings = utils.get_encoding_from_headers(response.headers)
    content = response.content.decode(encoding=encodings)
    cur_list = content.split("<Valute ID=")
    for fragment in cur_list:
        if code in fragment:
            start = fragment.find("<Value>") + 7
            end = fragment.find("</Value>")
            result = fragment[start:end]
            rate = float(result.replace(",", "."))
            print(rate)
            break
    # FIX: the original tested the *unnormalised* `currency` here while
    # matching the upper-cased code above, so a lower-case query printed both
    # the rate and None. Compare the normalised code instead.
    if code not in content:
        print(None)
def make_response(
    status_code: int = 200,
    content: bytes = b'',
    headers: dict = None,
    reason: str = None,
    encoding: str = None,
) -> Response:
    """Assemble a minimal, pre-read requests.Response for tests."""
    rsp = Response()
    rsp.status_code = status_code
    rsp._content = content
    rsp._content_consumed = True  # body is preloaded; nothing left to stream
    rsp.headers = CaseInsensitiveDict(headers or {})
    # An explicit encoding wins; otherwise derive one from the headers.
    rsp.encoding = encoding if encoding else get_encoding_from_headers(headers or {})
    rsp.reason = reason
    return rsp
def html_encoding(response_inst):
    """Resolve the encoding of a wrapped HTML response and set it in place."""
    enc = getattr(response_inst, 'encoding', None)
    if enc is False:
        # Encoding explicitly disabled — probably a non-text page.
        return
    if not enc:
        # Look for a charset declaration inside the document itself
        # (drop the first character to skip a BOM marker).
        stripped = response_inst.response.text[1:]
        declared = get_encodings_from_content(stripped)
        if declared:
            enc = declared[0]
    if not enc:
        # Fall back to the charset declared in the response headers.
        enc = get_encoding_from_headers(response_inst.response.headers)
    response_inst.response.encoding = enc or Public_Constant.default_html_encoding
def _lambda_decode_reponse(self, lambda_response):
    """Convert the json blob returned by a lambda invocation into a response
    that requests clients are used to.

    :param lambda_response: dict with optional "statusCode" and "headers",
        and either a "body" or an "errorMessage" payload.
    """
    response = Response()
    response.status_code = lambda_response.get("statusCode", 502)
    # FIX: use .get() so a non-standard status code doesn't raise KeyError
    # while decoding; the reason is simply left unset in that case.
    response.reason = STATUS_CODES_TO_REASON_PHRASES.get(response.status_code)
    response.headers = lambda_response.get("headers", {})
    response.encoding = get_encoding_from_headers(response.headers)
    # Successful invocations carry "body"; failures carry "errorMessage".
    if "body" in lambda_response:
        response.raw = _decode_payload(lambda_response, "body")
    elif "errorMessage" in lambda_response:
        response.raw = _decode_payload(lambda_response, "errorMessage")
    return response
def build_response(self, req, resp):
    """Adapt a urllib3-style response into a PlexResponse."""
    out = PlexResponse()
    # Tolerate raw objects without status/headers attributes.
    out.status_code = getattr(resp, 'status', None)
    out.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    out.encoding = get_encoding_from_headers(out.headers)
    out.raw = resp
    out.reason = out.raw.reason
    url = req.url
    out.url = url.decode('utf-8') if isinstance(url, bytes) else url
    # Pick up any cookies the server set.
    extract_cookies_to_jar(out.cookies, req, resp)
    out.request = req
    out.connection = self
    return out
def get_unicode_from_response(response):
    """Return the response content decoded to unicode.

    Attempts, in order: the header-declared charset, charsets declared inside
    the content (via
    :func:`requests_toolbelt.utils.deprecated.get_encodings_from_content`),
    a lossy ``replace`` decode with the header charset, and finally
    ``response.text``.

    .. code-block:: python

        import requests
        from requests_toolbelt.utils import deprecated

        r = requests.get(url)
        text = deprecated.get_unicode_from_response(r)

    :param response: Response object to get unicode content from.
    :type response: requests.models.Response
    """
    attempted = set()
    # Charset from the Content-Type header.
    header_enc = utils.get_encoding_from_headers(response.headers)
    if header_enc:
        try:
            return str(response.content, header_enc)
        except UnicodeError:
            attempted.add(header_enc.lower())
    # Charsets declared inside the document, skipping ones already tried.
    for candidate in get_encodings_from_content(response.content):
        candidate = candidate.lower()
        if candidate in attempted:
            continue
        try:
            return str(response.content, candidate)
        except UnicodeError:
            attempted.add(candidate)
    # Last resort: lossy decode with the header charset.
    if header_enc:
        try:
            return str(response.content, header_enc, errors='replace')
        except TypeError:
            pass
    return response.text
def get_unicode_from_response(response):
    """Return the requested content back in unicode.

    Decoding order: the header-declared charset, then any encodings found
    in the HTML content via
    :func:`requests_toolbelt.utils.deprecated.get_encodings_from_content`,
    then a replacement-character decode, then ``response.text``.

    .. code-block:: python

        import requests
        from requests_toolbelt.utils import deprecated

        r = requests.get(url)
        text = deprecated.get_unicode_from_response(r)

    :param response: Response object to get unicode content from.
    :type response: requests.models.Response
    """
    seen = set()

    header_enc = utils.get_encoding_from_headers(response.headers)
    if header_enc:
        # First shot: trust the Content-Type charset.
        try:
            return str(response.content, header_enc)
        except UnicodeError:
            seen.add(header_enc.lower())

    # Second pass: charsets embedded in the content, skipping duplicates.
    for enc in (e.lower() for e in get_encodings_from_content(response.content)):
        if enc in seen:
            continue
        try:
            return str(response.content, enc)
        except UnicodeError:
            seen.add(enc)

    # Fallback: lossy decode with the header charset if one existed.
    if header_enc:
        try:
            return str(response.content, header_enc, errors="replace")
        except TypeError:
            pass
    return response.text
def get_response(http_response: Dict[str, Any], http_headers: Dict[str, Any], is_raw_response: bool) -> Any:
    """Creates a fake `requests` Response with a desired HTTP response and
    response headers.
    """
    fake = requests.Response()
    # "status" travels inside the headers dict; pop it out before merging
    # the rest into the response headers.
    fake.status_code = http_headers.pop("status", 200)
    fake.headers.update(http_headers)
    fake.encoding = get_encoding_from_headers(fake.headers)
    if is_raw_response:
        body = http_response.encode()  # type: ignore  # payload is already a string here
    else:
        body = json.dumps(http_response).encode()
    fake._content = body  # type: ignore # This modifies a "hidden" attribute.
    return fake
def get_cb(self, request, context):
    """GET handler: return a paged directory listing as JSON, or a stored
    file's raw bytes.

    NOTE(review): relies on ``self.head_cb``, ``self.relpath`` and
    ``self.content`` defined elsewhere in this class.
    """
    # Reuse the HEAD handler's existence check; presumably it also sets the
    # 404 status on `context` when it answers b"Not Found" — confirm there.
    if self.head_cb(request, context) == b"Not Found":
        return
    relpath = self.relpath(request.path)
    if relpath == ".":
        # Directory listing with filer-style paging via `limit` and
        # `lastfilename` query parameters.
        if "limit" in request.qs:
            limit = int(request.qs["limit"][0])
            assert limit > 0
        else:
            limit = None
        items = sorted(self.content.items())
        if items and "lastfilename" in request.qs:
            lastfilename = request.qs["lastfilename"][0]
            if lastfilename:
                # exclude all filenames up to lastfilename
                items = dropwhile(lambda kv: kv[0] <= lastfilename, items)
        if limit:
            # +1 to easily detect if there are more
            items = islice(items, limit + 1)
        entries = [{
            "FullPath": os.path.join(request.path, fname),
            "Mode": self.MODE_FILE,
        } for fname, obj in items]
        thereismore = False
        if limit and len(entries) > limit:
            # We fetched limit+1 entries, so there is at least one more page.
            entries = entries[:limit]
            thereismore = True
        if entries:
            # Last returned name becomes the client's next `lastfilename`.
            lastfilename = entries[-1]["FullPath"].split("/")[-1]
        else:
            lastfilename = None
        text = json.dumps({
            "Path": request.path,
            "Limit": limit,
            "LastFileName": lastfilename,
            "ShouldDisplayLoadMore": thereismore,
            "Entries": entries,
        })
        # Honor the request's charset when encoding the JSON body.
        encoding = get_encoding_from_headers(request.headers) or "utf-8"
        return text.encode(encoding)
    else:
        # Plain file: return its stored bytes verbatim.
        return self.content[relpath]
def procdata_getencoding(seed,headers,content):
    # Decide the charset of a fetched page: header charset first, then
    # charsets declared inside the content.  Legacy Chinese charsets are
    # normalized to 'gbk'; any header charset other than gbk/utf-8 is
    # distrusted and re-detected from the content.
    code = utils.get_encoding_from_headers(headers)
    if code:
        if code.lower() == 'gbk' or code.lower() == 'gb2312':
            code = 'gbk'
        elif code.lower() == 'utf-8':
            code = 'utf-8'
        else:
            # e.g. the ISO-8859-1 default — not reliable, fall through
            # to content-based detection.
            code = None
    if code == None:
        # Fall back to <meta charset=...>-style declarations in the body.
        code = utils.get_encodings_from_content(content)
        print "content",seed,code
        if code:
            # get_encodings_from_content returns a list; take the first.
            code = code[0]
            if code.lower() == 'gbk' or code.lower() == 'gb2312':
                code = 'gbk'
    # May still be None when nothing could be determined.
    return code
def filter_encoding(self,seed, headers,content): code = utils.get_encoding_from_headers(headers) if code: if code.lower() == 'gbk' or code.lower() == 'gb2312': code = 'gbk' return True elif code.lower() == 'utf-8' or code.lower() == 'utf8': code = 'utf8' # as for utf8, we should check the content else: # 'ISO-8859-1' and so on, code = None # chinese website may also miss the content-encoding header, so detect the content if code == None: codes = utils.get_encodings_from_content(content) if codes: for code in codes: if code.lower() in [ 'gbk','gb2312']: return True elif code.lower() == 'utf8' or code.lower() == 'utf-8': code = 'utf8' break if code != 'utf8': return False # here handle utf8 # to detect any chinese char win try: ucon = content.decode('utf8') for uchar in ucon: i = ord(uchar) if i >= 0x4e00 and i <= 0x9fa5: return True except Exception, e: print url, e pass
def build_response(self, req, resp):
    """Turn a Django test-client response into a requests.Response."""
    response = Response()
    response.status_code = resp.status_code
    # Django responses iterate as (header, value) pairs via .items().
    response.headers = CaseInsensitiveDict(
        (name, value) for name, value in resp.items()
    )
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = StringIO(resp.content)
    response.reason = None

    url = req.url
    response.url = url.decode('utf-8') if isinstance(url, bytes) else url

    # Convert from django's SimpleCookie to request's CookieJar
    cookiejar_from_dict(resp.cookies, response.cookies)

    # context
    response.request = req
    response.connection = self

    return dispatch_hook('response', req.hooks, response)
def encoding(self):
    """
    encoding of Response.content.

    if Response.encoding is None, encoding will be guessed by header or
    content or chardet if available; the guess is cached in _encoding.
    """
    if hasattr(self, '_encoding'):
        return self._encoding

    # Already-decoded content needs no charset at all.
    if isinstance(self.content, six.text_type):
        return 'unicode'

    # 1) charset from the Content-Type header; requests' ISO-8859-1
    #    default carries no real information, so treat it as unknown.
    guess = get_encoding_from_headers(self.headers)
    if guess == 'ISO-8859-1':
        guess = None

    # 2) charset declared inside the document body.
    if not guess and get_encodings_from_content:
        if six.PY3:
            found = get_encodings_from_content(utils.pretty_unicode(self.content[:100]))
        else:
            found = get_encodings_from_content(self.content)
        guess = found and found[0] or None

    # 3) statistical detection on a prefix of the body.
    if not guess and chardet is not None:
        guess = chardet.detect(self.content[:600])['encoding']

    # gb18030 is a superset of gb2312 and decodes more pages correctly.
    if guess and guess.lower() == 'gb2312':
        guess = 'gb18030'

    self._encoding = guess or 'utf-8'
    return self._encoding
def start_response(status, headers):
    # WSGI start_response shim: record status and headers onto the
    # closed-over `response` object instead of transmitting them.
    status_code, _, _ = status.partition(' ')
    response.status_code = int(status_code)
    response.headers = CaseInsensitiveDict(headers)
    response.encoding = get_encoding_from_headers(response.headers)
def build_response(self, request, resp):
    """
    Builds a Requests' response object. This emulates most of the logic of
    the standard function but deals with the lack of the ``.headers``
    property on the HTTP20Response object.

    Additionally, this function builds in a number of features that are
    purely for HTTPie. This is to allow maximum compatibility with what
    urllib3 does, so that HTTPie doesn't fall over when it uses us.
    """
    response = Response()
    response.status_code = resp.status
    response.headers = CaseInsensitiveDict(resp.headers.iter_raw())
    response.raw = resp
    response.reason = resp.reason
    response.encoding = get_encoding_from_headers(response.headers)

    extract_cookies_to_jar(response.cookies, request, response)
    response.url = request.url

    response.request = request
    response.connection = self

    # First horrible patch: Requests expects its raw responses to have a
    # release_conn method, which I don't. We should monkeypatch a no-op on.
    resp.release_conn = lambda: None

    # Next, add the things HTTPie needs. It needs the following things:
    #
    # - The `raw` object has a property called `_original_response` that is
    #   a `httplib` response object.
    # - `raw._original_response` has three simple properties: `version`,
    #   `status`, `reason`.
    # - `raw._original_response.version` has one of three values: `9`,
    #   `10`, `11`.
    # - `raw._original_response.msg` exists.
    # - `raw._original_response.msg._headers` exists and is an iterable of
    #   two-tuples.
    #
    # We fake this out. Most of this exists on our response object already,
    # and the rest can be faked.
    #
    # All of this exists for httpie, which I don't have any tests for,
    # so I'm not going to bother adding test coverage for it.
    class FakeOriginalResponse(object):  # pragma: no cover
        # Minimal stand-in for both httplib.HTTPResponse and its `msg`
        # message object.
        def __init__(self, headers):
            self._headers = headers

        def get_all(self, name, default=None):
            # NOTE(review): matches stored names against name.lower();
            # assumes iter_raw() yields lowercase header names — confirm.
            values = []

            for n, v in self._headers:
                if n == name.lower():
                    values.append(v)

            if not values:
                return default

            return values

        def getheaders(self, name):
            return self.get_all(name, [])

    # Advertise version 20 (HTTP/2) on the fake original response.
    response.raw._original_response = orig = FakeOriginalResponse(None)
    orig.version = 20
    orig.status = resp.status
    orig.reason = resp.reason
    orig.msg = FakeOriginalResponse(resp.headers.iter_raw())

    return response
def fetch_release_file(filename, release, dist=None):
    """Look up a release artifact by filename, with cache-backed results.

    Returns an ``http.UrlResult`` or ``None`` when no matching artifact
    exists.  Hits are cached compressed for an hour; misses for a minute.
    """
    cache_key = 'releasefile:v1:%s:%s' % (release.id, md5_text(filename).hexdigest(), )

    logger.debug('Checking cache for release artifact %r (release_id=%s)', filename, release.id)
    result = cache.get(cache_key)

    dist_name = dist and dist.name or None

    if result is None:
        # Cache miss: query the database for any normalized variant of the
        # filename, scoped to this release/dist.
        filename_choices = ReleaseFile.normalize(filename)
        filename_idents = [ReleaseFile.get_ident(f, dist_name) for f in filename_choices]

        logger.debug(
            'Checking database for release artifact %r (release_id=%s)', filename, release.id
        )

        possible_files = list(
            ReleaseFile.objects.filter(
                release=release,
                dist=dist,
                ident__in=filename_idents,
            ).select_related('file')
        )

        if len(possible_files) == 0:
            logger.debug(
                'Release artifact %r not found in database (release_id=%s)', filename, release.id
            )
            # Negative-cache the miss briefly so repeated lookups stay cheap.
            cache.set(cache_key, -1, 60)
            return None
        elif len(possible_files) == 1:
            releasefile = possible_files[0]
        else:
            # Pick first one that matches in priority order.
            # This is O(N*M) but there are only ever at most 4 things here
            # so not really worth optimizing.
            releasefile = next((
                rf
                for ident in filename_idents
                for rf in possible_files
                if rf.ident == ident
            ))

        logger.debug(
            'Found release artifact %r (id=%s, release_id=%s)', filename, releasefile.id, release.id
        )
        try:
            # Read and compress the file body; z_body goes to cache,
            # body into the returned result.
            with metrics.timer('sourcemaps.release_file_read'):
                with releasefile.file.getfile() as fp:
                    z_body, body = compress_file(fp)
        except Exception:
            logger.error('sourcemap.compress_read_failed', exc_info=sys.exc_info())
            result = None
        else:
            headers = {k.lower(): v for k, v in releasefile.file.headers.items()}
            encoding = get_encoding_from_headers(headers)
            result = http.UrlResult(filename, headers, body, 200, encoding)
            # Cache the compressed body to keep the cache entry small.
            cache.set(cache_key, (headers, z_body, 200, encoding), 3600)

    elif result == -1:
        # We cached an error, so normalize
        # it down to None
        result = None
    else:
        # Previous caches would be a 3-tuple instead of a 4-tuple,
        # so this is being maintained for backwards compatibility
        try:
            encoding = result[3]
        except IndexError:
            encoding = None
        result = http.UrlResult(
            filename, result[0], zlib.decompress(result[1]), result[2], encoding
        )

    return result
def test_get_encoding_from_headers(value, expected):
    # Parametrized: each (value, expected) pair is supplied by the fixture.
    actual = get_encoding_from_headers(value)
    assert actual == expected
def fetch_release_file(filename, release):
    """Fetch a release artifact by url/filename with cache-backed results.

    Returns a ``(headers, body, status, encoding)`` tuple or ``None``.
    """
    cache_key = 'releasefile:v1:%s:%s' % (
        release.id,
        md5_text(filename).hexdigest(),
    )

    filename_path = None
    if filename is not None:
        # Reconstruct url without protocol + host
        # e.g. http://example.com/foo?bar => ~/foo?bar
        parsed_url = urlparse(filename)
        filename_path = '~' + parsed_url.path
        if parsed_url.query:
            filename_path += '?' + parsed_url.query

    logger.debug('Checking cache for release artifact %r (release_id=%s)', filename, release.id)
    result = cache.get(cache_key)

    if result is None:
        logger.debug('Checking database for release artifact %r (release_id=%s)', filename, release.id)

        # Look up both the full url and the hostless "~/path" variant.
        filename_idents = [ReleaseFile.get_ident(filename)]
        if filename_path is not None and filename_path != filename:
            filename_idents.append(ReleaseFile.get_ident(filename_path))

        possible_files = list(ReleaseFile.objects.filter(
            release=release,
            ident__in=filename_idents,
        ).select_related('file'))

        if len(possible_files) == 0:
            logger.debug('Release artifact %r not found in database (release_id=%s)', filename, release.id)
            # Negative-cache the miss for a minute.
            cache.set(cache_key, -1, 60)
            return None
        elif len(possible_files) == 1:
            releasefile = possible_files[0]
        else:
            # Prioritize releasefile that matches full url (w/ host)
            # over hostless releasefile
            target_ident = filename_idents[0]
            releasefile = next((f for f in possible_files if f.ident == target_ident))

        logger.debug('Found release artifact %r (id=%s, release_id=%s)', filename, releasefile.id, release.id)
        try:
            # Read and compress the file body; z_body goes to cache,
            # body into the returned result.
            with metrics.timer('sourcemaps.release_file_read'):
                with releasefile.file.getfile() as fp:
                    z_body, body = compress_file(fp)
        except Exception as e:
            logger.exception(six.text_type(e))
            # Unlike a plain miss, read failures are negative-cached for an hour.
            cache.set(cache_key, -1, 3600)
            result = None
        else:
            headers = {k.lower(): v for k, v in releasefile.file.headers.items()}
            encoding = get_encoding_from_headers(headers)
            result = (headers, body, 200, encoding)
            # Cache the compressed body; decompressed on cache hits below.
            cache.set(cache_key, (headers, z_body, 200, encoding), 3600)

    elif result == -1:
        # We cached an error, so normalize
        # it down to None
        result = None
    else:
        # Previous caches would be a 3-tuple instead of a 4-tuple,
        # so this is being maintained for backwards compatibility
        try:
            encoding = result[3]
        except IndexError:
            encoding = None
        result = (result[0], zlib.decompress(result[1]), result[2], encoding)

    return result