Example #1
class MockClientRedirectResponse:
    def __init__(self, loop):
        self.status = '302'
        self.headers = CIMultiDict({'location': '/somewhere_else'})
        self.content = StreamReader(loop=loop)
        self.content.feed_data(b'')
        self.content.feed_eof()

    def close(self):
        pass
Example #2
class MockClientErrorResponse:
    def __init__(self, loop):
        self.status = '500'
        self.headers = {}
        self.content = StreamReader(loop=loop)
        self.content.feed_data(b'blah')
        self.content.feed_eof()

    def close(self):
        pass
Example #3
    async def store(self, magnet: str, content: StreamReader, chunk_size=1024):
        """Check and store content file.

        :param magnet:
        :param content:
        :param chunk_size:
        :return:
        """
        to_path = self.get_absolute_path(magnet)
        # content_path = Path(to_path) / magnet_path(magnet)
        tmp_content_path = ''.join([str(to_path), 'tmp.%s' % random.randint(10000, 99999)])
        check = keccak.new(digest_bytes=32)

        try:
            Path(tmp_content_path).parent.mkdir(parents=True, exist_ok=True)
            with open(tmp_content_path, 'wb') as fd:
                async for chunk, _ in content.iter_chunks():
                    fd.write(chunk)
                    check.update(data=chunk)

            checksum = check.hexdigest()
            if checksum != magnet:
                self.log.error("Downloaded content file %s checksum %s didn't match", magnet, checksum)
                raise InvalidChecksum(magnet, checksum)
            shutil.move(tmp_content_path, to_path)
        finally:
            try:
                os.unlink(tmp_content_path)
            except FileNotFoundError:
                # it is ok, file was moved
                pass
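The checksum-then-move pattern above relies on Keccak-256 from pycryptodome. A minimal standalone sketch of the same verification step, outside the class (the function name and call style are illustrative, not from the source):

from Crypto.Hash import keccak

def verify_keccak256(path: str, expected_hex: str, chunk_size: int = 1024) -> bool:
    # Stream the file through Keccak-256 and compare it against the expected digest.
    check = keccak.new(digest_bytes=32)
    with open(path, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b''):
            check.update(chunk)
    return check.hexdigest() == expected_hex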
Example #4
    async def _store(self,
                     magnet: str,
                     content: StreamReader,
                     chunk_size=1024):
        """Check and store content file.

        :param magnet:
        :param content:
        :param chunk_size:
        :return:
        """
        to_path = self.get_absolute_path(magnet)
        tmp_content_path = ''.join(
            [str(to_path),
             'tmp.%s' % random.randint(10000, 99999)])
        check = keccak.new(digest_bytes=32)

        try:
            with open(tmp_content_path, 'wb') as fd:
                async for chunk, _ in content.iter_chunks():
                    fd.write(chunk)
                    check.update(data=chunk)

            checksum = check.hexdigest()
            if checksum != magnet:
                log.error(
                    "Downloaded content file %s checksum %s didn't match",
                    magnet, checksum)
                raise InvalidChecksum(magnet, checksum)
            shutil.move(tmp_content_path, to_path)
            return to_path
        finally:
            try:
                os.unlink(tmp_content_path)
            except FileNotFoundError:
                # Already moved into place, nothing left to clean up.
                pass
Example #5
    async def async_process_audio_stream(
        self, metadata: SpeechMetadata, stream: StreamReader
    ) -> SpeechResult:
        """Process an audio stream to STT service."""

        # Read available data
        async for _ in stream.iter_chunked(4096):
            pass

        return SpeechResult("Turn the Kitchen Lights on", SpeechResultState.SUCCESS)
Example #6
async def iter_jsonlines(
    content: aiohttp.StreamReader,
    chunk_size: int = 1024 * 1024,
) -> AsyncIterator[bytes]:
    """
    Iterate line by line over the response's content.

    Usage::

        async for line in iter_jsonlines(response.content):
            pass

    This is an equivalent of::

        async for line in response.content:
            pass

    Except that the aiohttp's line iteration fails if the accumulated buffer
    length is above 2**17 bytes, i.e. 128 KB (`aiohttp.streams.DEFAULT_LIMIT`
    for the buffer's low-watermark, multiplied by 2 for the high-watermark).
    Kubernetes secrets and other fields can be much longer, up to MBs in length.

    The chunk size of 1MB is an empirical guess for keeping the memory footprint
    reasonably low on huge amount of small lines (limited to 1 MB in total),
    while ensuring the near-instant reads of the huge lines (can be a problem
    with a small chunk size due to too many iterations).

    .. seealso::
        https://github.com/zalando-incubator/kopf/issues/275
    """

    # Minimize the memory footprint by keeping at most 2 copies of a yielded line in memory
    # (in the buffer and as a yielded value), and at most 1 copy of other lines (in the buffer).
    buffer = b''
    async for data in content.iter_chunked(chunk_size):
        buffer += data
        del data

        start = 0
        index = buffer.find(b'\n', start)
        while index >= 0:
            line = buffer[start:index]
            if line:
                yield line
            del line
            start = index + 1
            index = buffer.find(b'\n', start)

        if start > 0:
            buffer = buffer[start:]

    if buffer:
        yield buffer
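For context, a hedged usage sketch (not from the source): feeding a StreamReader by hand in a test and consuming it with iter_jsonlines. The mocked protocol and the payload are illustrative, and the StreamReader constructor arguments differ between aiohttp releases (compare the factory functions in the later examples):

import asyncio
from unittest.mock import Mock
from aiohttp import StreamReader

async def demo() -> None:
    loop = asyncio.get_running_loop()
    # Newer aiohttp expects StreamReader(protocol, limit, loop=...).
    reader = StreamReader(Mock(_reading_paused=False), 2 ** 16, loop=loop)
    reader.feed_data(b'{"kind": "Secret"}\n{"kind": "ConfigMap"}\n')
    reader.feed_eof()
    async for line in iter_jsonlines(reader):
        print(line)  # b'{"kind": "Secret"}', then b'{"kind": "ConfigMap"}'

asyncio.run(demo())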
Example #7
def _patch_stream_reader(reader: aiohttp.StreamReader) -> None:
    # https://github.com/pikvm/pikvm/issues/92
    # Infinite looping in BodyPartReader.read() because of the _at_eof flag.

    orig_read = reader.read

    async def read(self: aiohttp.StreamReader, n: int=-1) -> bytes:  # pylint: disable=invalid-name
        if self.is_eof():
            raise StreamerError("StreamReader.read(): Reached EOF")
        return (await orig_read(n))

    reader.read = types.MethodType(read, reader)  # type: ignore
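A possible call site, sketched under the assumption of an aiohttp multipart response such as an MJPEG stream (the URL handling and frame processing are assumptions, not part of the source):

import aiohttp

async def read_parts(session: aiohttp.ClientSession, url: str) -> None:
    async with session.get(url) as response:
        # Patch the reader so a read past EOF raises instead of looping forever.
        _patch_stream_reader(response.content)
        multipart = aiohttp.MultipartReader.from_response(response)
        while True:
            part = await multipart.next()
            if part is None:
                break
            frame = await part.read()
            # ... handle the frame bytes ...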
Example #8
    def build_response(self, method, url, params, data, headers):
        """"""
        try:
            if isinstance(url, URL):
                url = url.human_repr()

            # Check if we have to skip it
            if self.skip(url):
                return None, True

            # Check whether the response is already in the cassette
            if not data:
                data = {}
            if params:
                data.update(params)

            request = vcr.request.Request(method, url, data, headers)
            cassette = self.load_cassette(url)
            resp_json = cassette.play_response(request)
        except UnhandledHTTPRequestError:
            # Response not seen yet in cassette
            return None, False

        # Response was found in cassette
        cassette.play_counts = collections.Counter()
        # Create the response
        resp = ClientResponse(
            method,
            URL(url),
            request_info=Mock(),
            writer=Mock(),
            continue100=None,
            timer=TimerNoop(),
            traces=[],
            loop=Mock(),
            session=Mock(),
        )
        # Replicate status code and reason
        resp.status = resp_json['status']['code']
        resp.reason = resp_json['status']['message']

        # Set headers and content
        resp._headers = CIMultiDict(resp_json['headers'])
        resp.content = StreamReader(Mock(), limit=DEFAULT_STREAM_LIMIT)

        # Get the data
        data = resp_json['body']['data']

        resp.content.feed_data(data)
        resp.content.feed_eof()

        return resp, False
Example #9
    def build_response(self):
        if isinstance(self.exception, Exception):
            raise self.exception
        self.resp = ClientResponse(self.method, URL(self.url))
        # we need to initialize headers manually
        self.resp.headers = CIMultiDict({hdrs.CONTENT_TYPE: self.content_type})
        if self.headers:
            self.resp.headers.update(self.headers)
        self.resp.status = self.status
        self.resp.content = StreamReader()
        self.resp.content.feed_data(self.body)
        self.resp.content.feed_eof()

        return self.resp
Example #10
async def read_bytes(reader: StreamReader, chunk_size: int = 2**18) -> bytes:
    """
    Read image raw bytes from `reader`.

    Raises
    ------
    ValidationError:
        If the image file is too large.
    """
    buffer = b""
    async for chunk in reader.iter_chunked(chunk_size):
        buffer += chunk
        if len(buffer) > config.colorific.image_max_size_bytes:
            raise ValidationError(IMAGE_TOO_LARGE_ERROR, field_name="image")

    return buffer
Example #11
    def build_response(self, method: str, url: str, payload: str, *,
                       session: ClientSession, status: int) -> ClientResponse:
        response = session._response_class(
            method,
            URL(url),
            request_info=self.mock_module.Mock(),
            writer=self.mock_module.Mock(),
            continue100=None,
            timer=TimerNoop(),
            traces=[],
            loop=session.loop,
            session=session)
        response._headers = CIMultiDictProxy(
            CIMultiDict({'Content-Type': 'application/json'}))
        response.status = status
        if status >= 400:
            response.reason = payload

        response.content = StreamReader(ResponseHandler(session.loop))
        response.content.feed_data(str.encode(payload))
        response.content.feed_eof()
        return response
Example #12
class MockClientPostResponse:
    def __init__(self, loop, data):
        self.status = '200'
        self.headers = {}
        self.content = StreamReader(loop=loop)
        self.data = data
        # self.content.feed_data(b'got: ')
        # self.content.feed_data(data)
        # self.content.feed_eof()

    async def feed(self):
        self.content.feed_data(b'got: ')
        self.content.feed_data(await self.data)
        self.content.feed_eof()

    def close(self):
        pass
Example #13
    async def _process_subscription_stream(
                self,
                stream: aiohttp.StreamReader,
                callback: RawCallbackType
            ) -> None:
        self.logger.debug('Processing subscription StreamReader')

        remaining_bytes = bytes()

        while not stream.at_eof():
            # This could easily be implemented with stream.readline(); however,
            # that method raises an exception if a line is longer than 2**16
            # bytes.

            next_bytes = await stream.read(self.buffer_size)
            chunks = (remaining_bytes + next_bytes).split(b'\n')

            for chunk in chunks[:-1]:
                try:
                    await self._process_chunk(chunk, callback)
                except Exception:
                    self.logger.error("Error processing chunk", exc_info=True)

            remaining_bytes = chunks[-1]
Example #14
    def content(self):
        protocol = Mock(_reading_paused=False)
        stream = StreamReader(protocol)
        stream.feed_data(self.body)
        stream.feed_eof()
        return stream
Example #15
def stream_reader():
    return StreamReader()
Example #16
def stream_reader_factory(loop=None):
    return StreamReader()
Example #17
def stream_reader_factory():
    protocol = ResponseHandler()
    return StreamReader(protocol)
Example #18
    def __init__(self, loop, data):
        self.status = '200'
        self.headers = {}
        self.content = StreamReader(loop=loop)
        self.data = data
Example #19
    def __init__(self, loop):
        self.status = '302'
        self.headers = CIMultiDict({'location': '/somewhere_else'})
        self.content = StreamReader(loop=loop)
        self.content.feed_data(b'')
        self.content.feed_eof()
Example #20
    async def build_response(self,
                             session,
                             method,
                             url,
                             params=None,
                             data=None,
                             headers=None,
                             *args,
                             **kwargs):
        self.requests.append({
            'method': method,
            'url': url,
            'params': params,
            'data': data,
            'headers': headers
        })
        url = URL(url)
        url_parts = [p.split('?')[-1] for p in url.parts]

        if data is None and 'json' in kwargs:
            data = json.dumps(kwargs['json'])
        resp = ClientResponse(
            method,
            url,
            request_info=Mock(),
            writer=Mock(),
            continue100=None,
            timer=TimerNoop(),
            traces=[],
            loop=Mock(),
            session=Mock(),
        )
        func_name = '_'.join(url_parts[-2:])
        func = None
        if hasattr(self, func_name):
            func = getattr(self, func_name)
        else:
            func_name = url_parts[-1]
            if hasattr(self, func_name):
                func = getattr(self, func_name)

        if func is not None:
            status, data, ct = await func(params or url.query, data, headers)
            resp.status = status
        else:
            print(f'Method {func_name} not implemented')
            resp.status = 200
            if method.lower() == 'patch':
                resp.status = 204
            ct = 'application/json'
            data = '{}'

        resp._headers = CIMultiDict({hdrs.CONTENT_TYPE: ct})
        loop = asyncio.get_event_loop()
        protocol = Mock(_reading_paused=False)
        resp.content = StreamReader(protocol,
                                    loop=loop,
                                    limit=DEFAULT_STREAM_LIMIT)
        if isinstance(data, str):
            data = data.encode('utf8')
        resp.content.feed_data(data)
        resp.content.feed_eof()
        self.responses.append(resp)
        return resp
Example #21
    def __init__(self, loop):
        self.status = '500'
        self.headers = {}
        self.content = StreamReader(loop=loop)
        self.content.feed_data(b'blah')
        self.content.feed_eof()
Example #22
def stream_reader_factory(
        loop: 'Optional[asyncio.AbstractEventLoop]' = None):
    protocol = ResponseHandler(loop=loop)
    return StreamReader(protocol)
Example #23
def stream_reader_factory():
    return StreamReader()