Example #1
class FakePayload:
    """
    A wrapper around BytesIO that restricts what can be read since data from
    the network can't be sought and cannot be read outside of its content
    length. This makes sure that views can't do anything under the test client
    that wouldn't work in real life.
    """
    def __init__(self, content=None):
        self.__content = BytesIO()
        self.__len = 0
        self.read_started = False
        if content is not None:
            self.write(content)

    def __len__(self):
        return self.__len

    def read(self, num_bytes=None):
        if not self.read_started:
            self.__content.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self.__len or 0
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        content = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return content

    def write(self, content):
        if self.read_started:
            raise ValueError("Unable to write a payload after he's been read")
        content = force_bytes(content)
        self.__content.write(content)
        self.__len += len(content)
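
A minimal usage sketch for the wrapper above (a hypothetical session, not part of the original listing): force_bytes is the Django helper that write() relies on, and the byte values are illustrative only.

from django.utils.encoding import force_bytes  # required by FakePayload.write()

payload = FakePayload(b"hello")
assert len(payload) == 5
assert payload.read(2) == b"he"   # reading consumes the tracked length
assert payload.read() == b"llo"   # read() with no argument drains the rest
# payload.write(b"more")          # would now raise ValueError: read already started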
Example #2
def compress_kml(kml):
    "Return compressed KMZ from the given KML string."
    kmz = BytesIO()
    with zipfile.ZipFile(kmz, 'a', zipfile.ZIP_DEFLATED) as zf:
        zf.writestr('doc.kml', kml.encode(settings.DEFAULT_CHARSET))
    kmz.seek(0)
    return kmz.read()
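
A hedged round-trip sketch for compress_kml(): it assumes a configured Django settings module (for DEFAULT_CHARSET) and simply reopens the returned KMZ bytes as a zip archive to confirm the single doc.kml entry.

import zipfile
from io import BytesIO

kmz_bytes = compress_kml("<kml xmlns='http://www.opengis.net/kml/2.2'></kml>")
with zipfile.ZipFile(BytesIO(kmz_bytes)) as zf:
    assert zf.namelist() == ["doc.kml"]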
Example #3
    def _load(self):
        palette = self._read_palette()

        data = bytearray()
        self.fd.seek(self._blp_offsets[0])

        if self._blp_compression == 1:
            # Uncompressed or DirectX compression

            if self._blp_encoding == BLP_ENCODING_UNCOMPRESSED:
                _data = BytesIO(self.fd.read(self._blp_lengths[0]))
                while True:
                    try:
                        offset, = struct.unpack("<B", _data.read(1))
                    except struct.error:
                        break
                    b, g, r, a = palette[offset]
                    data.extend((r, g, b))

            elif self._blp_encoding == BLP_ENCODING_DXT:
                if self._blp_alpha_encoding == BLP_ALPHA_ENCODING_DXT1:
                    linesize = (self.size[0] + 3) // 4 * 8
                    for yb in range((self.size[1] + 3) // 4):
                        for d in decode_dxt1(self.fd.read(linesize),
                                             alpha=bool(
                                                 self._blp_alpha_depth)):
                            data += d

                elif self._blp_alpha_encoding == BLP_ALPHA_ENCODING_DXT3:
                    linesize = (self.size[0] + 3) // 4 * 16
                    for yb in range((self.size[1] + 3) // 4):
                        for d in decode_dxt3(self.fd.read(linesize)):
                            data += d

                elif self._blp_alpha_encoding == BLP_ALPHA_ENCODING_DXT5:
                    linesize = (self.size[0] + 3) // 4 * 16
                    for yb in range((self.size[1] + 3) // 4):
                        for d in decode_dxt5(self.fd.read(linesize)):
                            data += d
                else:
                    raise BLPFormatError("Unsupported alpha encoding %r" %
                                         (self._blp_alpha_encoding))
            else:
                raise BLPFormatError("Unknown BLP encoding %r" %
                                     (self._blp_encoding))

        else:
            raise BLPFormatError("Unknown BLP compression %r" %
                                 (self._blp_compression))

        self.set_as_raw(bytes(data))
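
A hedged usage sketch: _load() above runs inside a Pillow image plugin, so in practice a BLP texture is reached through the normal Image API. The file name below is a placeholder.

from PIL import Image

with Image.open("texture.blp") as im:   # hypothetical path
    im.load()                           # triggers _load() through the plugin machinery
    print(im.mode, im.size)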
Example #4
    def readline(self, size=None):
        while b'\n' not in self.buffer and \
                (size is None or len(self.buffer) < size):
            if size:
                # since size is not None here, len(self.buffer) < size
                chunk = self._read_limited(size - len(self.buffer))
            else:
                chunk = self._read_limited()
            if not chunk:
                break
            self.buffer += chunk
        sio = BytesIO(self.buffer)
        if size:
            line = sio.readline(size)
        else:
            line = sio.readline()
        self.buffer = sio.read()
        return line
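
A minimal sketch of the readline() contract shown above. It assumes the method belongs to a stream wrapper such as Django's LimitedStream, which supplies the self.buffer attribute and the _read_limited() helper; the byte payload is made up.

from io import BytesIO
from django.core.handlers.wsgi import LimitedStream

stream = LimitedStream(BytesIO(b"first line\nsecond line\n"), 23)
assert stream.readline() == b"first line\n"   # stops at the newline
assert stream.readline(6) == b"second"        # honours the size limit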
Example #5
def _save(im, fp, filename):
    fp.write(_MAGIC)  # (2+2)
    sizes = im.encoderinfo.get(
        "sizes",
        [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128),
         (256, 256)],
    )
    width, height = im.size
    sizes = filter(
        lambda x: x[0] <= width and x[1] <= height and x[0] <= 256 and x[1] <= 256,
        sizes,
    )
    sizes = list(sizes)
    fp.write(struct.pack("<H", len(sizes)))  # idCount(2)
    offset = fp.tell() + len(sizes) * 16
    for size in sizes:
        width, height = size
        # 0 means 256
        fp.write(struct.pack("B", width if width < 256 else 0))  # bWidth(1)
        fp.write(struct.pack("B", height if height < 256 else 0))  # bHeight(1)
        fp.write(b"\0")  # bColorCount(1)
        fp.write(b"\0")  # bReserved(1)
        fp.write(b"\0\0")  # wPlanes(2)
        fp.write(struct.pack("<H", 32))  # wBitCount(2)

        image_io = BytesIO()
        tmp = im.copy()
        tmp.thumbnail(size, Image.LANCZOS)
        tmp.save(image_io, "png")
        image_io.seek(0)
        image_bytes = image_io.read()
        bytes_len = len(image_bytes)
        fp.write(struct.pack("<I", bytes_len))  # dwBytesInRes(4)
        fp.write(struct.pack("<I", offset))  # dwImageOffset(4)
        current = fp.tell()
        fp.seek(offset)
        fp.write(image_bytes)
        offset = offset + bytes_len
        fp.seek(current)
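
A hedged usage sketch: in Pillow a _save() hook like the one above is registered for the ICO format, so it is normally reached through Image.save(), optionally with a "sizes" option as read from im.encoderinfo above. The output path is a placeholder.

from PIL import Image

img = Image.new("RGBA", (64, 64), (255, 0, 0, 255))
img.save("favicon.ico", sizes=[(16, 16), (32, 32), (64, 64)])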
Example #6
    def _load(self):
        if self._blp_compression == BLP_FORMAT_JPEG:
            self._decode_jpeg_stream()

        elif self._blp_compression == 1:
            if self._blp_encoding in (4, 5):
                data = bytearray()
                palette = self._read_palette()
                _data = BytesIO(self.fd.read(self._blp_lengths[0]))
                while True:
                    try:
                        offset, = struct.unpack("<B", _data.read(1))
                    except struct.error:
                        break
                    b, g, r, a = palette[offset]
                    data.extend([r, g, b])

                self.set_as_raw(bytes(data))
            else:
                raise BLPFormatError("Unsupported BLP encoding %r" %
                                     (self._blp_encoding))
        else:
            raise BLPFormatError("Unsupported BLP compression %r" %
                                 (self._blp_compression))
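
A small standalone sketch of the palette-index loop above: each byte read from the stream selects a BGRA palette entry and the RGB triplet is appended to the output. The palette and index bytes here are made up for illustration.

import struct
from io import BytesIO

palette = [(255, 0, 0, 255)] * 256        # hypothetical all-blue BGRA palette
_data = BytesIO(bytes([0, 1, 2]))         # three palette indices
out = bytearray()
while True:
    try:
        offset, = struct.unpack("<B", _data.read(1))
    except struct.error:
        break
    b, g, r, a = palette[offset]
    out.extend([r, g, b])
assert bytes(out) == b"\x00\x00\xff" * 3  # blue, in RGB order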
Example #7
class HttpRequest:
    """A basic HTTP request."""

    # The encoding used in GET/POST dicts. None means use default setting.
    _encoding = None
    _upload_handlers = []

    def __init__(self):
        # WARNING: The `WSGIRequest` subclass doesn't call `super`.
        # Any variable assignment made here should also happen in
        # `WSGIRequest.__init__()`.

        self.GET = QueryDict(mutable=True)
        self.POST = QueryDict(mutable=True)
        self.COOKIES = {}
        self.META = {}
        self.FILES = MultiValueDict()

        self.path = ''
        self.path_info = ''
        self.method = None
        self.resolver_match = None
        self.content_type = None
        self.content_params = None

    def __repr__(self):
        if self.method is None or not self.get_full_path():
            return '<%s>' % self.__class__.__name__
        return '<%s: %s %r>' % (self.__class__.__name__, self.method,
                                self.get_full_path())

    @cached_property
    def headers(self):
        return HttpHeaders(self.META)

    def _get_raw_host(self):
        """
        Return the HTTP host using the environment or request headers. Skip
        allowed hosts protection, so may return an insecure host.
        """
        # We try three options, in order of decreasing preference.
        if settings.USE_X_FORWARDED_HOST and 'HTTP_X_FORWARDED_HOST' in self.META:
            host = self.META['HTTP_X_FORWARDED_HOST']
        elif 'HTTP_HOST' in self.META:
            host = self.META['HTTP_HOST']
        else:
            # Reconstruct the host using the algorithm from PEP 333.
            host = self.META['SERVER_NAME']
            server_port = self.get_port()
            if server_port != ('443' if self.is_secure() else '80'):
                host = '%s:%s' % (host, server_port)
        return host

    def get_host(self):
        """Return the HTTP host using the environment or request headers."""
        host = self._get_raw_host()

        # Allow variants of localhost if ALLOWED_HOSTS is empty and DEBUG=True.
        allowed_hosts = settings.ALLOWED_HOSTS
        if settings.DEBUG and not allowed_hosts:
            allowed_hosts = ['localhost', '127.0.0.1', '[::1]']

        domain, port = split_domain_port(host)
        if domain and validate_host(domain, allowed_hosts):
            return host
        else:
            msg = "Invalid HTTP_HOST header: %r." % host
            if domain:
                msg += " You may need to add %r to ALLOWED_HOSTS." % domain
            else:
                msg += " The domain name provided is not valid according to RFC 1034/1035."
            raise DisallowedHost(msg)

    def get_port(self):
        """Return the port number for the request as a string."""
        if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.META:
            port = self.META['HTTP_X_FORWARDED_PORT']
        else:
            port = self.META['SERVER_PORT']
        return str(port)

    def get_full_path(self, force_append_slash=False):
        return self._get_full_path(self.path, force_append_slash)

    def get_full_path_info(self, force_append_slash=False):
        return self._get_full_path(self.path_info, force_append_slash)

    def _get_full_path(self, path, force_append_slash):
        # RFC 3986 requires query string arguments to be in the ASCII range.
        # Rather than crash if this doesn't happen, we encode defensively.
        return '%s%s%s' % (
            escape_uri_path(path),
            '/' if force_append_slash and not path.endswith('/') else '',
            ('?' + iri_to_uri(self.META.get('QUERY_STRING', '')))
            if self.META.get('QUERY_STRING', '') else '',
        )

    def get_signed_cookie(self,
                          key,
                          default=RAISE_ERROR,
                          salt='',
                          max_age=None):
        """
        Attempt to return a signed cookie. If the signature fails or the
        cookie has expired, raise an exception, unless the `default` argument
        is provided, in which case return that value.
        """
        try:
            cookie_value = self.COOKIES[key]
        except KeyError:
            if default is not RAISE_ERROR:
                return default
            else:
                raise
        try:
            value = signing.get_cookie_signer(salt=key + salt).unsign(
                cookie_value, max_age=max_age)
        except signing.BadSignature:
            if default is not RAISE_ERROR:
                return default
            else:
                raise
        return value

    def get_raw_uri(self):
        """
        Return an absolute URI from variables available in this request. Skip
        allowed hosts protection, so may return insecure URI.
        """
        return '{scheme}://{host}{path}'.format(
            scheme=self.scheme,
            host=self._get_raw_host(),
            path=self.get_full_path(),
        )

    def build_absolute_uri(self, location=None):
        """
        Build an absolute URI from the location and the variables available in
        this request. If no ``location`` is specified, build the absolute URI
        using request.get_full_path(). If the location is absolute, convert it
        to an RFC 3987 compliant URI and return it. If location is relative or
        is scheme-relative (i.e., ``//example.com/``), urljoin() it to a base
        URL constructed from the request variables.
        """
        if location is None:
            # Make it an absolute url (but schemeless and domainless) for the
            # edge case that the path starts with '//'.
            location = '//%s' % self.get_full_path()
        bits = urlsplit(location)
        if not (bits.scheme and bits.netloc):
            # Handle the simple, most common case. If the location is absolute
            # and a scheme or host (netloc) isn't provided, skip an expensive
            # urljoin() as long as no path segments are '.' or '..'.
            if (bits.path.startswith('/') and not bits.scheme
                    and not bits.netloc and '/./' not in bits.path
                    and '/../' not in bits.path):
                # If location starts with '//' but has no netloc, reuse the
                # schema and netloc from the current request. Strip the double
                # slashes and continue as if it wasn't specified.
                if location.startswith('//'):
                    location = location[2:]
                location = self._current_scheme_host + location
            else:
                # Join the constructed URL with the provided location, which
                # allows the provided location to apply query strings to the
                # base path.
                location = urljoin(self._current_scheme_host + self.path,
                                   location)
        return iri_to_uri(location)

    @cached_property
    def _current_scheme_host(self):
        return '{}://{}'.format(self.scheme, self.get_host())

    def _get_scheme(self):
        """
        Hook for subclasses like WSGIRequest to implement. Return 'http' by
        default.
        """
        return 'http'

    @property
    def scheme(self):
        if settings.SECURE_PROXY_SSL_HEADER:
            try:
                header, secure_value = settings.SECURE_PROXY_SSL_HEADER
            except ValueError:
                raise ImproperlyConfigured(
                    'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.'
                )
            header_value = self.META.get(header)
            if header_value is not None:
                return 'https' if header_value == secure_value else 'http'
        return self._get_scheme()

    def is_secure(self):
        return self.scheme == 'https'

    def is_ajax(self):
        return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'

    @property
    def encoding(self):
        return self._encoding

    @encoding.setter
    def encoding(self, val):
        """
        Set the encoding used for GET/POST accesses. If the GET or POST
        dictionary has already been created, remove and recreate it on the
        next access (so that it is decoded correctly).
        """
        self._encoding = val
        if hasattr(self, 'GET'):
            del self.GET
        if hasattr(self, '_post'):
            del self._post

    def _initialize_handlers(self):
        self._upload_handlers = [
            uploadhandler.load_handler(handler, self)
            for handler in settings.FILE_UPLOAD_HANDLERS
        ]

    @property
    def upload_handlers(self):
        if not self._upload_handlers:
            # If there are no upload handlers defined, initialize them from settings.
            self._initialize_handlers()
        return self._upload_handlers

    @upload_handlers.setter
    def upload_handlers(self, upload_handlers):
        if hasattr(self, '_files'):
            raise AttributeError(
                "You cannot set the upload handlers after the upload has been processed."
            )
        self._upload_handlers = upload_handlers

    def parse_file_upload(self, META, post_data):
        """Return a tuple of (POST QueryDict, FILES MultiValueDict)."""
        self.upload_handlers = ImmutableList(
            self.upload_handlers,
            warning="You cannot alter upload handlers after the upload has been processed.",
        )
        parser = MultiPartParser(META, post_data, self.upload_handlers,
                                 self.encoding)
        return parser.parse()

    @property
    def body(self):
        if not hasattr(self, '_body'):
            if self._read_started:
                raise RawPostDataException(
                    "You cannot access body after reading from request's data stream"
                )

            # Limit the maximum request data size that will be handled in-memory.
            if (settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
                    and int(self.META.get('CONTENT_LENGTH')
                            or 0) > settings.DATA_UPLOAD_MAX_MEMORY_SIZE):
                raise RequestDataTooBig(
                    'Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.'
                )

            try:
                self._body = self.read()
            except IOError as e:
                raise UnreadablePostError(*e.args) from e
            self._stream = BytesIO(self._body)
        return self._body

    def _mark_post_parse_error(self):
        self._post = QueryDict()
        self._files = MultiValueDict()

    def _load_post_and_files(self):
        """Populate self._post and self._files if the content-type is a form type"""
        if self.method != 'POST':
            self._post, self._files = QueryDict(
                encoding=self._encoding), MultiValueDict()
            return
        if self._read_started and not hasattr(self, '_body'):
            self._mark_post_parse_error()
            return

        if self.content_type == 'multipart/form-data':
            if hasattr(self, '_body'):
                # Use already read data
                data = BytesIO(self._body)
            else:
                data = self
            try:
                self._post, self._files = self.parse_file_upload(
                    self.META, data)
            except MultiPartParserError:
                # An error occurred while parsing POST data. Since when
                # formatting the error the request handler might access
                # self.POST, set self._post and self._file to prevent
                # attempts to parse POST data again.
                self._mark_post_parse_error()
                raise
        elif self.content_type == 'application/x-www-form-urlencoded':
            self._post, self._files = QueryDict(
                self.body, encoding=self._encoding), MultiValueDict()
        else:
            self._post, self._files = QueryDict(
                encoding=self._encoding), MultiValueDict()

    def close(self):
        if hasattr(self, '_files'):
            for f in chain.from_iterable(l[1] for l in self._files.lists()):
                f.close()

    # File-like and iterator interface.
    #
    # Expects self._stream to be set to an appropriate source of bytes by
    # a corresponding request subclass (e.g. WSGIRequest).
    # Also when request data has already been read by request.POST or
    # request.body, self._stream points to a BytesIO instance
    # containing that data.

    def read(self, *args, **kwargs):
        self._read_started = True
        try:
            return self._stream.read(*args, **kwargs)
        except IOError as e:
            raise UnreadablePostError(*e.args) from e

    def readline(self, *args, **kwargs):
        self._read_started = True
        try:
            return self._stream.readline(*args, **kwargs)
        except IOError as e:
            raise UnreadablePostError(*e.args) from e

    def __iter__(self):
        return iter(self.readline, b'')

    def xreadlines(self):
        warnings.warn(
            'HttpRequest.xreadlines() is deprecated in favor of iterating the '
            'request.',
            RemovedInDjango30Warning,
            stacklevel=2,
        )
        yield from self

    def readlines(self):
        return list(self)
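
A hedged sketch exercising a few of the accessors above through Django's test RequestFactory. It assumes a configured settings module and, for get_host(), that 'testserver' is an allowed host (Django's test runner arranges this automatically).

from django.test import RequestFactory

request = RequestFactory().get("/articles/", {"q": "BytesIO"})
print(request.method)           # 'GET'
print(request.get_full_path())  # '/articles/?q=BytesIO'
print(request.scheme)           # 'http'
print(request.get_host())       # 'testserver'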
Example #8
    def _open(self):
        magic, header_size = struct.unpack("<II", self.fp.read(8))
        if header_size != 124:
            raise IOError("Unsupported header size %r" % (header_size))
        header_bytes = self.fp.read(header_size - 4)
        if len(header_bytes) != 120:
            raise IOError("Incomplete header: %s bytes" % len(header_bytes))
        header = BytesIO(header_bytes)

        flags, height, width = struct.unpack("<3I", header.read(12))
        self._size = (width, height)
        self.mode = "RGBA"

        pitch, depth, mipmaps = struct.unpack("<3I", header.read(12))
        struct.unpack("<11I", header.read(44))  # reserved

        # pixel format
        pfsize, pfflags = struct.unpack("<2I", header.read(8))
        fourcc = header.read(4)
        bitcount, = struct.unpack("<I", header.read(4))
        masks = struct.unpack("<4I", header.read(16))
        if pfflags & 0x40:
            # DDPF_RGB - Texture contains uncompressed RGB data
            masks = {
                mask: ["R", "G", "B", "A"][i]
                for i, mask in enumerate(masks)
            }
            rawmode = ""
            if bitcount == 32:
                rawmode += masks[0xFF000000]
            rawmode += masks[0xFF0000] + masks[0xFF00] + masks[0xFF]

            self.tile = [("raw", (0, 0) + self.size, 0, (rawmode, 0, 1))]
        else:
            data_start = header_size + 4
            n = 0
            if fourcc == b"DXT1":
                self.pixel_format = "DXT1"
                n = 1
            elif fourcc == b"DXT3":
                self.pixel_format = "DXT3"
                n = 2
            elif fourcc == b"DXT5":
                self.pixel_format = "DXT5"
                n = 3
            elif fourcc == b"DX10":
                data_start += 20
                # ignoring flags which pertain to volume textures and cubemaps
                dxt10 = BytesIO(self.fp.read(20))
                dxgi_format, dimension = struct.unpack("<II", dxt10.read(8))
                if dxgi_format in (DXGI_FORMAT_BC7_TYPELESS,
                                   DXGI_FORMAT_BC7_UNORM):
                    self.pixel_format = "BC7"
                    n = 7
                elif dxgi_format == DXGI_FORMAT_BC7_UNORM_SRGB:
                    self.pixel_format = "BC7"
                    self.im_info["gamma"] = 1 / 2.2
                    n = 7
                else:
                    raise NotImplementedError("Unimplemented DXGI format %d" %
                                              (dxgi_format))
            else:
                raise NotImplementedError("Unimplemented pixel format %r" %
                                          (fourcc))

            self.tile = [("bcn", (0, 0) + self.size, data_start, (n))]
Example #9
class WebPImageFile(ImageFile.ImageFile):

    format = "WEBP"
    format_description = "WebP image"

    def _open(self):
        if not _webp.HAVE_WEBPANIM:
            # Legacy mode
            data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode(
                self.fp.read())
            if icc_profile:
                self.info["icc_profile"] = icc_profile
            if exif:
                self.info["exif"] = exif
            self._size = width, height
            self.fp = BytesIO(data)
            self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
            self._n_frames = 1
            return

        # Use the newer AnimDecoder API to parse the (possibly) animated file,
        # and access muxed chunks like ICC/EXIF/XMP.
        self._decoder = _webp.WebPAnimDecoder(self.fp.read())

        # Get info from decoder
        width, height, loop_count, bgcolor, frame_count, mode = \
            self._decoder.get_info()
        self._size = width, height
        self.info["loop"] = loop_count
        bg_a, bg_r, bg_g, bg_b = (
            (bgcolor >> 24) & 0xFF,
            (bgcolor >> 16) & 0xFF,
            (bgcolor >> 8) & 0xFF,
            bgcolor & 0xFF,
        )
        self.info["background"] = (bg_r, bg_g, bg_b, bg_a)
        self._n_frames = frame_count
        self.mode = "RGB" if mode == "RGBX" else mode
        self.rawmode = mode
        self.tile = []

        # Attempt to read ICC / EXIF / XMP chunks from file
        icc_profile = self._decoder.get_chunk("ICCP")
        exif = self._decoder.get_chunk("EXIF")
        xmp = self._decoder.get_chunk("XMP ")
        if icc_profile:
            self.info["icc_profile"] = icc_profile
        if exif:
            self.info["exif"] = exif
        if xmp:
            self.info["xmp"] = xmp

        # Initialize seek state
        self._reset(reset=False)
        self.seek(0)

    def _getexif(self):
        if "exif" not in self.info:
            return None
        return dict(self.getexif())

    @property
    def n_frames(self):
        return self._n_frames

    @property
    def is_animated(self):
        return self._n_frames > 1

    def seek(self, frame):
        if not _webp.HAVE_WEBPANIM:
            return super(WebPImageFile, self).seek(frame)

        # Perform some simple checks first
        if frame >= self._n_frames:
            raise EOFError("attempted to seek beyond end of sequence")
        if frame < 0:
            raise EOFError("negative frame index is not valid")

        # Set logical frame to requested position
        self.__logical_frame = frame

    def _reset(self, reset=True):
        if reset:
            self._decoder.reset()
        self.__physical_frame = 0
        self.__loaded = -1
        self.__timestamp = 0

    def _get_next(self):
        # Get next frame
        ret = self._decoder.get_next()
        self.__physical_frame += 1

        # Check if an error occurred
        if ret is None:
            self._reset()  # Reset just to be safe
            self.seek(0)
            raise EOFError("failed to decode next frame in WebP file")

        # Compute duration
        data, timestamp = ret
        duration = timestamp - self.__timestamp
        self.__timestamp = timestamp

        # libwebp gives frame end, adjust to start of frame
        timestamp -= duration
        return data, timestamp, duration

    def _seek(self, frame):
        if self.__physical_frame == frame:
            return  # Nothing to do
        if frame < self.__physical_frame:
            self._reset()  # Rewind to beginning
        while self.__physical_frame < frame:
            self._get_next()  # Advance to the requested frame

    def load(self):
        if _webp.HAVE_WEBPANIM:
            if self.__loaded != self.__logical_frame:
                self._seek(self.__logical_frame)

                # We need to load the image data for this frame
                data, timestamp, duration = self._get_next()
                self.info["timestamp"] = timestamp
                self.info["duration"] = duration
                self.__loaded = self.__logical_frame

                # Set tile
                if self.fp and self._exclusive_fp:
                    self.fp.close()
                self.fp = BytesIO(data)
                self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)]

        return super(WebPImageFile, self).load()

    def tell(self):
        if not _webp.HAVE_WEBPANIM:
            return super(WebPImageFile, self).tell()

        return self.__logical_frame
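
A hedged sketch of stepping through an animated WebP with the class above, assuming Pillow was built against libwebp with animation support (_webp.HAVE_WEBPANIM). The file name is a placeholder.

from PIL import Image

with Image.open("animation.webp") as im:
    print(im.n_frames, im.is_animated)
    for frame in range(im.n_frames):
        im.seek(frame)
        im.load()   # decodes the frame and fills info["timestamp"] / info["duration"]
        print(frame, im.info["timestamp"], im.info["duration"])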