def from_file(cls, fp):
    """Read one serialized HTTP response from a file-like object.

    ``fp`` must implement ``.read(size)`` and ``.readline()``.  Reading
    stops at the end of the response, not the end of the file; the input
    is expected to look like ``str(resp)`` and must carry a
    ``Content-Length`` header for the body to be read.
    """
    status = fp.readline().strip()
    is_text = isinstance(status, text_type)
    # Choose separators matching the str/bytes flavour of the stream.
    if is_text:
        _colon, _http = ':', 'HTTP/'
    else:
        _colon, _http = b':', b'HTTP/'
    if status.startswith(_http):
        # Drop the "HTTP/x.y" prefix, keeping "<code> <reason>".
        http_ver, status_num, status_text = status.split(None, 2)
        status = '%s %s' % (native_(status_num), native_(status_text))
    headerlist = []
    while True:
        line = fp.readline().strip()
        if not line:
            break  # blank line terminates the header section
        try:
            header_name, value = line.split(_colon, 1)
        except ValueError:
            raise ValueError('Bad header line: %r' % line)
        headerlist.append((
            native_(header_name, 'latin-1'),
            native_(value.strip(), 'latin-1'),
        ))
    r = cls(status=status, headerlist=headerlist, app_iter=())
    body = fp.read(r.content_length or 0)
    if is_text:
        r.text = body
    else:
        r.body = body
    return r
def from_file(cls, fp):
    """Parse one HTTP response (as produced by ``str(resp)``) from ``fp``.

    ``fp`` needs ``.read(size)`` and ``.readline()``; only the response
    itself is consumed, not the rest of the file.  Responses must have a
    ``Content-Length`` header.
    """
    status = fp.readline().strip()
    is_text = isinstance(status, text_type)
    if is_text:
        colon = ':'
        http_prefix = 'HTTP/'
    else:
        colon = b':'
        http_prefix = b'HTTP/'
    if status.startswith(http_prefix):
        # Strip the protocol version; keep only "<code> <reason>".
        _version, code, reason = status.split(None, 2)
        status = '%s %s' % (native_(code), native_(reason))
    headerlist = []
    line = fp.readline().strip()
    while line:
        pieces = line.split(colon, 1)
        if len(pieces) != 2:
            raise ValueError('Bad header line: %r' % line)
        name, value = pieces
        headerlist.append(
            (native_(name, 'latin-1'), native_(value.strip(), 'latin-1')))
        line = fp.readline().strip()
    resp = cls(status=status, headerlist=headerlist, app_iter=())
    body = fp.read(resp.content_length or 0)
    if is_text:
        resp.text = body
    else:
        resp.body = body
    return resp
def test__setitem__success_append(self):
    # Appending a new cookie keeps the existing ones intact.
    environ = {'HTTP_COOKIE': 'a=1; b=2'}
    inst = self._makeOne(environ)
    inst['c'] = native_(b'test_cookie', 'utf-8')
    self.assertEqual(environ['HTTP_COOKIE'], 'a=1; b=2; c=test_cookie')
def serialize(self, full=True):
    """Serialize this cookie into a ``Set-Cookie`` header value.

    When ``full`` is true, attributes (path, domain, ...), the expires
    date, and the secure/HttpOnly/SameSite flags are included; otherwise
    only ``name=value`` is emitted.

    :raises ValueError: if ``SameSite=None`` is used without the
        ``secure`` flag -- browsers reject such cookies, so refuse to
        emit them (matching the sibling implementation in this file).
    """
    result = []
    add = result.append
    add(self.name + b"=" + _value_quote(self.value))
    if full:
        for k in _c_valkeys:
            v = self[k]
            if v:
                info = _c_renames[k]
                name = info["name"]
                quoter = info["quoter"]
                add(name + b"=" + quoter(v))
        expires = self[b"expires"]
        if expires:
            add(b"expires=" + expires)
        if self.secure:
            add(b"secure")
        if self.httponly:
            add(b"HttpOnly")
        if self.samesite:
            # SameSite=None is only valid on Secure cookies.
            if not self.secure and self.samesite.lower() == b"none":
                raise ValueError(
                    "Incompatible cookie attributes: "
                    "when the samesite equals 'none', then the secure must be True"
                )
            add(b"SameSite=" + self.samesite)
    return native_(b"; ".join(result), "ascii")
def serialize(self, full=True):
    """Render the cookie as a ``Set-Cookie`` header value.

    ``full=False`` emits just ``name=value``; otherwise every set
    attribute and flag is appended.  ``SameSite=None`` without the
    ``secure`` flag raises ``ValueError``.
    """
    parts = [self.name + b"=" + _value_quote(self.value)]
    if full:
        for key in _c_valkeys:
            attr_value = self[key]
            if attr_value:
                info = _c_renames[key]
                parts.append(info["name"] + b"=" + info["quoter"](attr_value))
        expires = self[b"expires"]
        if expires:
            parts.append(b"expires=" + expires)
        if self.secure:
            parts.append(b"secure")
        if self.httponly:
            parts.append(b"HttpOnly")
        if self.samesite:
            if self.samesite.lower() == b"none" and not self.secure:
                raise ValueError(
                    "Incompatible cookie attributes: "
                    "when the samesite equals 'none', then the secure must be True"
                )
            parts.append(b"SameSite=" + self.samesite)
    return native_(b"; ".join(parts), "ascii")
def test__setitem__success_append(self):
    # Non-ASCII values get octal-escaped and quoted on append.
    environ = {'HTTP_COOKIE': 'a=1; b=2'}
    inst = self._makeOne(environ)
    inst['c'] = native_(b'La Pe\xc3\xb1a', 'utf-8')
    self.assertEqual(
        environ['HTTP_COOKIE'], 'a=1; b=2; c="La Pe\\303\\261a"')
def test__setitem__success_append(self):
    # Setting a new key appends to the existing Cookie header.
    value = native_(b'La Pe\xc3\xb1a', 'utf-8')
    environ = {'HTTP_COOKIE': 'a=1; b=2'}
    self._makeOne(environ)['c'] = value
    expected = 'a=1; b=2; c="La Pe\\303\\261a"'
    self.assertEqual(environ['HTTP_COOKIE'], expected)
def test_fget_nonascii(self):
    # A latin-1-flavoured environ value survives the url_encoding round trip.
    desc = self._callFUT('HTTP_X_AKEY', encattr='url_encoding')
    req = self._makeRequest()
    raw = b'\xc3\xab'
    req.environ['HTTP_X_AKEY'] = raw.decode('latin-1') if PY3 else raw
    assert desc.fget(req) == native_(b'\xc3\xab', 'latin-1')
def test_fget_nonascii(self):
    # fget returns the environ value decoded via url_encoding.
    desc = self._callFUT("HTTP_X_AKEY", encattr="url_encoding")
    req = self._makeRequest()
    if PY3:
        stored = b"\xc3\xab".decode("latin-1")
    else:
        stored = b"\xc3\xab"
    req.environ["HTTP_X_AKEY"] = stored
    self.assertEqual(desc.fget(req), native_(b"\xc3\xab", "latin-1"))
def serialize_date(dt):
    """Convert *dt* to an RFC 1123 date string suitable for HTTP headers.

    Accepts an already-formatted str/bytes, a ``timedelta`` (relative to
    now), a ``datetime``/``date``, a time tuple, or a numeric timestamp.
    Raises ``ValueError`` for anything else.
    """
    if isinstance(dt, (bytes, text_type)):
        return native_(dt)
    if isinstance(dt, timedelta):
        # A delta means "this far from now".
        dt = _now() + dt
    if isinstance(dt, (datetime, date)):
        dt = dt.timetuple()
    if isinstance(dt, (tuple, time.struct_time)):
        dt = calendar.timegm(dt)
    if isinstance(dt, float) or isinstance(dt, integer_types):
        return formatdate(dt, usegmt=True)
    raise ValueError(
        "You must pass in a datetime, date, time tuple, or integer object, "
        "not %r" % dt)
def parse_date(value):
    """Parse an HTTP date header into a timezone-aware UTC ``datetime``.

    Returns ``None`` for empty or unparseable input instead of raising.
    """
    if not value:
        return None
    try:
        value = native_(value)
    except Exception:
        # Narrowed from a bare ``except:`` so that SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return None
    t = parsedate_tz(value)
    if t is None:
        # Could not parse
        return None
    if t[-1] is None:
        # No timezone given. None would mean local time, but we'll force UTC
        t = t[:9] + (0,)
    t = mktime_tz(t)
    return datetime.fromtimestamp(t, UTC)
def parse_date(value):
    """Parse an HTTP date string/bytes; return an aware UTC ``datetime``.

    Unparseable or empty values yield ``None`` rather than an exception.
    """
    if not value:
        return None
    try:
        value = native_(value)
    except Exception:
        # Was a bare ``except:``; narrowed so control-flow exceptions
        # (KeyboardInterrupt, SystemExit) propagate.
        return None
    t = parsedate_tz(value)
    if t is None:
        # Could not parse
        return None
    if t[-1] is None:
        # No timezone given. None would mean local time, but we'll force UTC
        t = t[:9] + (0, )
    t = mktime_tz(t)
    return datetime.fromtimestamp(t, UTC)
def serialize(self, full=True):
    """Return the cookie as a native string for a ``Set-Cookie`` header.

    With ``full=False`` only the ``name=value`` pair is produced.
    """
    parts = [self.name + b'=' + _quote(self.value)]
    if full:
        for key in _c_valkeys:
            val = self[key]
            if val:
                parts.append(_c_renames[key] + b'=' + _quote(val))
        expires = self[b'expires']
        if expires:
            parts.append(b'expires=' + expires)
        if self.secure:
            parts.append(b'secure')
        if self.httponly:
            parts.append(b'HttpOnly')
    return native_(b'; '.join(parts), 'ascii')
def serialize(self, full=True):
    """Serialize to a ``Set-Cookie`` header value (native string)."""
    pieces = []
    pieces.append(self.name + b'=' + _quote(self.value))
    if not full:
        # Short form: just the name=value pair.
        return native_(b'; '.join(pieces), 'ascii')
    for key in _c_valkeys:
        val = self[key]
        if val:
            pieces.append(_c_renames[key] + b'=' + _quote(val))
    expires = self[b'expires']
    if expires:
        pieces.append(b'expires=' + expires)
    if self.secure:
        pieces.append(b'secure')
    if self.httponly:
        pieces.append(b'HttpOnly')
    return native_(b'; '.join(pieces), 'ascii')
def md5_etag(self, body=None, set_content_md5=False):
    """Set ``self.etag`` from an MD5 hash of *body* (default ``self.body``).

    The etag is the base64 digest with newlines removed and ``=``
    padding stripped.  When *set_content_md5* is true,
    ``self.content_md5`` is set to the (padded) base64 digest as well.
    """
    if body is None:
        body = self.body
    digest = b64encode(md5(body).digest()).replace(b'\n', b'')
    digest = native_(digest)
    self.etag = digest.strip('=')
    if set_content_md5:
        self.content_md5 = digest
def _mutate_header(self, name, value):
    """Set, replace, or (with ``value=None``) remove cookie *name* inside
    the WSGI environ's ``HTTP_COOKIE`` header.

    Returns True if a cookie with that name was already present.
    """
    header = self._environ.get("HTTP_COOKIE")
    had_header = header is not None
    header = header or ""
    if not PY2:
        # Work on bytes so the regex/quoting operate uniformly on py2/py3.
        header = header.encode("latin-1")
    bytes_name = bytes_(name, "ascii")
    if value is None:
        replacement = None
    else:
        bytes_val = _value_quote(bytes_(value, "utf-8"))
        replacement = bytes_name + b"=" + bytes_val
    matches = _rx_cookie.finditer(header)
    found = False
    for match in matches:
        start, end = match.span()
        match_name = match.group(1)
        if match_name == bytes_name:
            found = True
            if replacement is None:
                # remove value
                header = header[:start].rstrip(b" ;") + header[end:]
            else:
                # replace value
                header = header[:start] + replacement + header[end:]
            break
    else:
        # Name not present: append a new pair (no-op when deleting).
        if replacement is not None:
            if header:
                header += b"; " + replacement
            else:
                header = replacement
    if header:
        self._environ["HTTP_COOKIE"] = native_(header, "latin-1")
    elif had_header:
        # Header existed but is now empty -- keep the key, blank it out.
        self._environ["HTTP_COOKIE"] = ""
    return found
def serialize(self, full=True):
    """Serialize this cookie to a ``Set-Cookie`` header value.

    When ``full`` is true the attribute pairs, expires date, and the
    secure/HttpOnly/SameSite flags are appended; otherwise only
    ``name=value`` is produced.

    :raises ValueError: if ``SameSite=None`` is combined with a
        non-secure cookie -- browsers reject that combination, so the
        check from the sibling implementation is applied here too.
    """
    result = []
    add = result.append
    add(self.name + b'=' + _value_quote(self.value))
    if full:
        for k in _c_valkeys:
            v = self[k]
            if v:
                info = _c_renames[k]
                name = info['name']
                quoter = info['quoter']
                add(name + b'=' + quoter(v))
        expires = self[b'expires']
        if expires:
            add(b'expires=' + expires)
        if self.secure:
            add(b'secure')
        if self.httponly:
            add(b'HttpOnly')
        if self.samesite:
            # SameSite=None is only valid on Secure cookies.
            if not self.secure and self.samesite.lower() == b'none':
                raise ValueError(
                    "Incompatible cookie attributes: "
                    "when the samesite equals 'none', then the secure must be True"
                )
            add(b'SameSite=' + self.samesite)
    return native_(b'; '.join(result), 'ascii')
def _mutate_header(self, name, value):
    """Set, replace, or (with ``value=None``) delete cookie *name* in the
    WSGI environ's ``HTTP_COOKIE`` header.

    Returns True when a cookie with that name already existed.
    """
    header = self._environ.get('HTTP_COOKIE')
    had_header = header is not None
    header = header or ''
    if PY3: # pragma: no cover
        # Work on bytes so the regex/quoting behave identically on py2/py3.
        header = header.encode('latin-1')
    bytes_name = bytes_(name, 'ascii')
    if value is None:
        replacement = None
    else:
        bytes_val = _value_quote(bytes_(value, 'utf-8'))
        replacement = bytes_name + b'=' + bytes_val
    matches = _rx_cookie.finditer(header)
    found = False
    for match in matches:
        start, end = match.span()
        match_name = match.group(1)
        if match_name == bytes_name:
            found = True
            if replacement is None: # remove value
                header = header[:start].rstrip(b' ;') + header[end:]
            else: # replace value
                header = header[:start] + replacement + header[end:]
            break
    else:
        # Name not found: append a new pair (no-op when deleting).
        if replacement is not None:
            if header:
                header += b'; ' + replacement
            else:
                header = replacement
    if header:
        self._environ['HTTP_COOKIE'] = native_(header, 'latin-1')
    elif had_header:
        # Header existed but is now empty -- keep the key, blank it out.
        self._environ['HTTP_COOKIE'] = ''
    return found
def create_response(headers, body, status):
    """Build a webob ``Response`` (utf8 charset) from a header mapping."""
    headerlist = [
        (native_(key), native_(val)) for key, val in headers.items()
    ]
    return Response(
        body=body,
        status=status,
        headerlist=headerlist,
        charset='utf8',
    )
def test__setitem__success_no_existing_headers(self):
    # With no Cookie header yet, setting a value creates one (quoted).
    environ = {}
    inst = self._makeOne(environ)
    inst['a'] = native_(b'La Pe\xc3\xb1a', 'utf-8')
    self.assertEqual(environ['HTTP_COOKIE'], 'a="La Pe\\303\\261a"')
def test__setitem__success_append(self):
    # New cookies are appended after the existing ones.
    environ = {"HTTP_COOKIE": "a=1; b=2"}
    inst = self._makeOne(environ)
    inst["c"] = native_(b"test_cookie", "utf-8")
    assert environ["HTTP_COOKIE"] == "a=1; b=2; c=test_cookie"
def create_response(headers, body, status):
    """Build a ``Response`` from a header mapping, a body, and a status."""
    headerlist = []
    for key, val in headers.items():
        headerlist.append((native_(key), native_(val)))
    return Response(body=body, status=status, headerlist=headerlist)
def test_transcode_non_multipart():
    # urlencoded bodies are transcoded from cp1251 to utf-8 percent-escapes.
    req = Request.blank('/?a', POST='%EF%F0%E8=%E2%E5%F2')
    req._content_type_raw = 'application/x-www-form-urlencoded'
    decoded = req.decode('cp1251')
    assert native_(decoded.body) == '%D0%BF%D1%80%D0%B8=%D0%B2%D0%B5%D1%82'
def test__setitem__success_no_existing_headers(self):
    # Setting the first cookie creates the HTTP_COOKIE key.
    environ = {}
    inst = self._makeOne(environ)
    value = native_(b"test_cookie", "utf-8")
    inst["a"] = value
    assert environ["HTTP_COOKIE"] == "a=test_cookie"
def test_transcode_non_form():
    # Non-form content types are passed through decode() untouched.
    req = Request.blank('/?a', POST='%EF%F0%E8=%E2%E5%F2')
    req._content_type_raw = 'application/x-foo'
    decoded = req.decode('cp1251')
    assert native_(decoded.body) == '%EF%F0%E8=%E2%E5%F2'
def response_from_file(cls, fp, block_size=1 << 16):  # 64KB
    """Read one HTTP/1.1 response from a file-like object.

    ``fp`` must implement ``.read(size)`` and ``.readline()``.  Reading
    stops at the end of the response (Content-Length, chunked
    transfer-encoding, or EOF for ``Connection: close``), not the end
    of the file.  Returns ``None`` at EOF before any status line.
    """
    headerlist = []
    status_line = fp.readline().strip()
    if not status_line:
        return None
    is_text = isinstance(status_line, text_type)
    # Match the str/bytes flavour of the stream for all comparisons.
    if is_text:
        _colon = ':'
        _http11 = 'HTTP/1.1 '
    else:
        _colon = b':'
        _http11 = b'HTTP/1.1 '
    # FIX: the prefix check previously always used the bytes literal
    # b'HTTP/1.1 ', which fails (TypeError on Python 3) for text streams
    # even though _colon was already selected by is_text.
    if not status_line.startswith(_http11):
        raise ValueError(
            "malformed status line, expected: 'HTTP/1.1 ', got: %r"
            % status_line)
    http_version, status = status_line.split(None, 1)
    chunked = False
    keep_alive = True
    while 1:
        line = fp.readline()
        if not line:
            raise ValueError('missing CRLF terminating headers')
        line = line.strip()
        if not line:
            break  # end of headers
        try:
            header_name, value = line.split(_colon, 1)
        except ValueError:
            raise ValueError('bad header line: %r' % (line))
        value = value.strip()
        if not is_text:
            header_name = native_(header_name, 'utf-8')
            value = native_(value, 'utf-8')
        header_name_lower = header_name.lower()
        if header_name_lower == 'transfer-encoding':
            value = value.lower()
            if value == 'chunked':
                chunked = True
            elif value == 'identity':
                pass
            else:
                raise ValueError('unsupported Transfer-Encoding: %s' % value)
        elif header_name_lower == 'connection':
            value = value.lower()
            if value == 'close':
                keep_alive = False
            elif value == 'keep-alive':
                pass
            else:
                raise ValueError('unsupported Connection: %s' % value)
        else:
            headerlist.append((header_name, value))
    r = cls(
        status=status,
        headerlist=headerlist,
        app_iter=[],
    )
    if chunked:
        # Read chunks until the zero-length terminating chunk.
        end_chunk = False
        while not end_chunk:
            line = fp.readline()
            if not line:
                break  # EOF
            try:
                chunk_size = int(line, 16)
            except ValueError:
                raise ValueError('invalid chunk header')
            if chunk_size == 0:
                end_chunk = True
            remaining = chunk_size
            chunk = fp.read(min(remaining, block_size))
            while chunk:
                r.app_iter.append(chunk)
                remaining -= len(chunk)
                chunk = fp.read(min(remaining, block_size))
            if remaining:
                raise ValueError('EOF while reading chunk')
            line = fp.readline()
            if not line:
                raise ValueError('missing CRLF terminating chunk')
            if line.strip():
                raise ValueError('chunk too long')
    elif r.content_length is not None:
        remaining = r.content_length
        chunk = fp.read(min(remaining, block_size))
        while chunk:
            r.app_iter.append(chunk)
            remaining -= len(chunk)
            chunk = fp.read(min(remaining, block_size))
        if remaining:
            raise ValueError('EOF while reading body')
    elif keep_alive:
        raise ValueError('missing Content-Length')
    else:
        # Connection: close and no length -- body runs to EOF.
        chunk = fp.read(block_size)
        while chunk:
            r.app_iter.append(chunk)
            chunk = fp.read(block_size)
    return r
def test_fset_nonascii(self):
    # fset stores the text value latin-1 encoded in the environ.
    desc = self._callFUT("HTTP_X_AKEY", encattr="url_encoding")
    req = self._makeRequest()
    value = text_(b"\xc3\xab", "utf-8")
    desc.fset(req, value)
    assert req.environ["HTTP_X_AKEY"] == native_(b"\xc3\xab", "latin-1")
def test_fset_nonascii(self):
    # Setting a non-ASCII text value round-trips to latin-1 in environ.
    desc = self._callFUT('HTTP_X_AKEY', encattr='url_encoding')
    req = self._makeRequest()
    desc.fset(req, text_(b'\xc3\xab', 'utf-8'))
    stored = req.environ['HTTP_X_AKEY']
    assert stored == native_(b'\xc3\xab', 'latin-1')
def test_fget_nonascii(self):
    # A raw latin-1 environ value decodes back to the utf-8 text.
    desc = self._callFUT('HTTP_X_AKEY', encattr='url_encoding')
    req = self._makeRequest()
    req.environ['HTTP_X_AKEY'] = native_(b'\xc3\xab')
    result = desc.fget(req)
    assert result == text_(b'\xc3\xab', 'utf-8')
def test___setitem__name_not_encodeable_to_ascii(self):
    # Non-ASCII cookie names must be rejected with TypeError.
    name = native_(b'La Pe\xc3\xb1a', 'utf-8')
    inst = self._makeOne({})
    with pytest.raises(TypeError):
        inst[name] = 'abc'
def test__setitem__success_no_existing_headers(self):
    # First assignment creates the HTTP_COOKIE entry.
    environ = {}
    inst = self._makeOne(environ)
    inst['a'] = native_(b'test_cookie', 'utf-8')
    self.assertEqual(environ['HTTP_COOKIE'], 'a=test_cookie')
def test_transcode_non_multipart():
    # decode() re-encodes urlencoded cp1251 bodies as utf-8 escapes.
    req = Request.blank("/?a", POST="%EF%F0%E8=%E2%E5%F2")
    req._content_type_raw = "application/x-www-form-urlencoded"
    req2 = req.decode("cp1251")
    expected = "%D0%BF%D1%80%D0%B8=%D0%B2%D0%B5%D1%82"
    assert native_(req2.body) == expected
def test_transcode_non_form():
    # decode() leaves bodies with unknown content types unchanged.
    req = Request.blank("/?a", POST="%EF%F0%E8=%E2%E5%F2")
    req._content_type_raw = "application/x-foo"
    req2 = req.decode("cp1251")
    expected = "%EF%F0%E8=%E2%E5%F2"
    assert native_(req2.body) == expected
def response_from_file(cls, fp, block_size=1 << 16):  # 64KB
    """Read a single HTTP/1.1 response from a file-like object.

    ``fp`` must implement ``.read(size)`` and ``.readline()``.  The body
    is consumed per Content-Length, chunked transfer-encoding, or to EOF
    for ``Connection: close``; reading stops at the end of the response.
    Returns ``None`` if the stream is already at EOF.
    """
    headerlist = []
    status_line = fp.readline().strip()
    if not status_line:
        return None
    is_text = isinstance(status_line, text_type)
    # Select str- or bytes-flavoured constants to match the stream.
    if is_text:
        _colon = ':'
        _expected_prefix = 'HTTP/1.1 '
    else:
        _colon = b':'
        _expected_prefix = b'HTTP/1.1 '
    # FIX: previously compared against the bytes literal b'HTTP/1.1 '
    # unconditionally, which breaks (TypeError on Python 3) when the
    # stream yields text even though _colon was chosen via is_text.
    if not status_line.startswith(_expected_prefix):
        raise ValueError(
            "malformed status line, expected: 'HTTP/1.1 ', got: %r"
            % status_line)
    http_version, status = status_line.split(None, 1)
    chunked = False
    keep_alive = True
    while 1:
        line = fp.readline()
        if not line:
            raise ValueError('missing CRLF terminating headers')
        line = line.strip()
        if not line:
            break  # end of headers
        try:
            header_name, value = line.split(_colon, 1)
        except ValueError:
            raise ValueError('bad header line: %r' % (line))
        value = value.strip()
        if not is_text:
            header_name = native_(header_name, 'utf-8')
            value = native_(value, 'utf-8')
        header_name_lower = header_name.lower()
        if header_name_lower == 'transfer-encoding':
            value = value.lower()
            if value == 'chunked':
                chunked = True
            elif value == 'identity':
                pass
            else:
                raise ValueError('unsupported Transfer-Encoding: %s' % value)
        elif header_name_lower == 'connection':
            value = value.lower()
            if value == 'close':
                keep_alive = False
            elif value == 'keep-alive':
                pass
            else:
                raise ValueError('unsupported Connection: %s' % value)
        else:
            headerlist.append((header_name, value))
    r = cls(
        status=status,
        headerlist=headerlist,
        app_iter=[],
    )
    if chunked:
        # Consume chunks until the zero-length terminator.
        end_chunk = False
        while not end_chunk:
            line = fp.readline()
            if not line:
                break  # EOF
            try:
                chunk_size = int(line, 16)
            except ValueError:
                raise ValueError('invalid chunk header')
            if chunk_size == 0:
                end_chunk = True
            remaining = chunk_size
            chunk = fp.read(min(remaining, block_size))
            while chunk:
                r.app_iter.append(chunk)
                remaining -= len(chunk)
                chunk = fp.read(min(remaining, block_size))
            if remaining:
                raise ValueError('EOF while reading chunk')
            line = fp.readline()
            if not line:
                raise ValueError('missing CRLF terminating chunk')
            if line.strip():
                raise ValueError('chunk too long')
    elif r.content_length is not None:
        remaining = r.content_length
        chunk = fp.read(min(remaining, block_size))
        while chunk:
            r.app_iter.append(chunk)
            remaining -= len(chunk)
            chunk = fp.read(min(remaining, block_size))
        if remaining:
            raise ValueError('EOF while reading body')
    elif keep_alive:
        raise ValueError('missing Content-Length')
    else:
        # Connection: close without a length -- read until EOF.
        chunk = fp.read(block_size)
        while chunk:
            r.app_iter.append(chunk)
            chunk = fp.read(block_size)
    return r
def __repr__(self):
    """Debug representation showing the cookie's name and value."""
    return '<%s: %s=%r>' % (
        self.__class__.__name__,
        native_(self.name),
        native_(self.value),
    )
def __repr__(self):
    """Return ``<ClassName: name=value>`` for debugging."""
    cls_name = self.__class__.__name__
    return '<%s: %s=%r>' % (cls_name, native_(self.name), native_(self.value))
def test_create_response(self):
    # create_response wires headers, status, and body into a Response.
    response = create_response({'Content-Type': 'text/html'}, 'body', 200)
    self.assertEqual(response.status, '200 OK')
    content_type = response.headers[native_('Content-Type')]
    self.assertEqual(content_type, native_('text/html'))
    self.assertEqual(response.body, 'body'.encode('utf-8'))
def test___setitem__name_not_encodeable_to_ascii(self):
    # Cookie names must be ASCII; otherwise TypeError is raised.
    name = native_(b'La Pe\xc3\xb1a', 'utf-8')
    inst = self._makeOne({})
    with self.assertRaises(TypeError):
        inst[name] = 'abc'
def test_fget_nonascii(self):
    # The latin-1 stored value is returned unchanged by fget.
    desc = self._callFUT("HTTP_X_AKEY", encattr="url_encoding")
    req = self._makeRequest()
    req.environ["HTTP_X_AKEY"] = native_(b"\xc3\xab", "latin-1")
    result = desc.fget(req)
    assert result == native_(b"\xc3\xab", "latin-1")