Пример #1
0
 async def test_forbidden(self):
     """Disabling ``streaming_get`` must make the streaming endpoint answer 403."""
     self.data = get_random_bytes(1000)
     await self._setup_stream(self.data, file_size=1000)
     conf = self.daemon.conf
     target = f'http://{conf.streaming_host}:{conf.streaming_port}/get/foo'
     # Flip the flag only after the stream exists, so the 403 can come
     # solely from the disabled feature and not from a missing stream.
     conf.streaming_get = False
     async with aiohttp_request('get', target) as response:
         self.assertEqual(403, response.status)
Пример #2
0
 async def _emit(self, record):
     """Ship a formatted log record to the HTTP collector, best-effort.

     Connection problems are swallowed deliberately: emitting a log line
     must never crash the application.
     """
     payload = self.format(record).encode()
     try:
         async with utils.aiohttp_request(
                 'post', self.url, data=payload,
                 cookies=self.cookies) as response:
             self.cookies.update(response.cookies)
     except ClientError:
         pass  # best-effort delivery only
Пример #3
0
 async def fetch_headers_from_s3(self):
     """Download missing block headers from s3 into the local headers file.

     Resumes from the current local file size via an HTTP Range request,
     streams the body to disk in 512-byte chunks while updating
     ``self._headers_progress_percent``, then verifies file integrity.
     Returns early (with a warning) when s3 has nothing newer or its copy
     looks corrupted.
     """
     local_header_size = self.local_header_file_size()
     # Ask s3 for only the bytes we do not already have.
     resume_header = {"Range": f"bytes={local_header_size}-"}
     async with utils.aiohttp_request('get',
                                      HEADERS_URL,
                                      headers=resume_header) as response:
         # 406: the Range start is past the end of the remote file.
         if response.status == 406 or response.content_length < HEADER_SIZE:  # our file is bigger
             log.warning("s3 is more out of date than we are")
             return
         # A valid headers file is always a whole number of headers.
         if response.content_length % HEADER_SIZE != 0:
             log.warning("s3 appears to have corrupted header")
             return
         final_size_after_download = response.content_length + local_header_size
         write_mode = "wb"
         if local_header_size > 0:
             log.info("Resuming download of %i bytes from s3",
                      response.content_length)
             # Append instead of truncating when resuming a partial file.
             write_mode = "a+b"
         with open(self.headers_file, write_mode) as fd:
             while True:
                 chunk = await response.content.read(512)
                 if not chunk:
                     break
                 fd.write(chunk)
                 # fd.tell() counts pre-existing bytes too, so progress is
                 # measured against the final expected size.
                 self._headers_progress_percent = math.ceil(
                     float(fd.tell()) / float(final_size_after_download) *
                     100)
         log.info(
             "fetched headers from s3, now verifying integrity after download."
         )
         self._check_header_file_integrity()
Пример #4
0
 async def fetch_headers_from_s3(self):
     """Download missing headers from the s3 mirror and connect them.

     Resumes from ``self.headers.bytes_size`` via an HTTP Range request,
     validates that the remote copy is larger and header-aligned, then
     streams chunks, connecting only whole headers and carrying a partial
     trailing header over in ``buffer`` for the next iteration.
     """
     local_header_size = self.headers.bytes_size
     # Ask s3 for only the bytes we do not already have.
     resume_header = {"Range": f"bytes={local_header_size}-"}
     async with utils.aiohttp_request('get', HEADERS_URL, headers=resume_header) as response:
         if response.status == 406 or response.content_length < HEADER_SIZE:  # our file is bigger
             log.warning("s3 is more out of date than we are")
             return
         if response.content_length % HEADER_SIZE != 0:
             log.warning("s3 appears to have corrupted header")
             return
         final_size_after_download = response.content_length + local_header_size
         if local_header_size > 0:
             log.info("Resuming download of %i bytes from s3", response.content_length)
         buffer, header_size = b'', self.headers.header_size
         async for chunk in response.content.iter_any():
             chunk = buffer + chunk
             remaining = len(chunk) % header_size
             # BUG FIX: when the chunk is an exact multiple of header_size,
             # the old `chunk[:-remaining]` with remaining == 0 produced an
             # empty slice, deferring the whole chunk to the buffer -- and
             # silently dropping it if the stream ended there. Only slice
             # when there is a genuine partial header to carry over.
             if remaining:
                 chunk, buffer = chunk[:-remaining], bytes(chunk[-remaining:])
             else:
                 buffer = b''
             if not chunk:
                 continue
             if not await self.headers.connect(len(self.headers), chunk):
                 log.warning("Error connecting downloaded headers from at %s.", self.headers.height)
                 return
             self._headers_progress_percent = self._round_progress(
                 self.headers.bytes_size, final_size_after_download
             )
Пример #5
0
 async def get_downloadable_header_height(self) -> typing.Optional[int]:
     """Return how many headers the remote mirror currently offers.

     NOTE(review): the annotation says ``Optional[int]`` but the error
     path returns -1, never None -- confirm what callers check before
     reconciling the two.
     """
     async with utils.aiohttp_request('HEAD', self.HEADERS_URL) as response:
         if response.status == 200:
             return response.content_length // self.headers.header_size
         log.warning(
             "Header download error, unexpected response code: %s",
             response.status)
         return -1
Пример #6
0
 async def get_downloadable_header_height(self) -> typing.Optional[int]:
     """Return the number of headers available for download over https.

     Returns None when the mirror answers with a non-200 status or when
     the connection fails. Previously the non-200 path logged a warning
     and then fell through to the division anyway, which raises TypeError
     whenever ``content_length`` is None on an error response.
     """
     try:
         async with utils.aiohttp_request('HEAD', HEADERS_URL) as response:
             if response.status != 200:
                 log.warning("Header download error, unexpected response code: %s", response.status)
                 return None
             return response.content_length // HEADER_SIZE
     except OSError:
         log.exception("Failed to download headers using https.")
Пример #7
0
async def jsonrpc_post(url: str, method: str, params: dict = None, **kwargs) -> any:
    """POST a JSON-RPC 2.0 request to *url* and return its result.

    *params* and extra keyword arguments are merged into the request's
    ``params`` object. BUG FIX: the original ``params.update(kwargs)``
    mutated the caller's dict in place; the merge now builds a new dict.

    Returns the ``result`` member when present, the decoded body
    otherwise, or the raw response text when it is not valid JSON.
    """
    merged_params = {**(params or {}), **kwargs}
    json_body = {'jsonrpc': '2.0', 'id': 1, 'method': method, 'params': merged_params}
    async with utils.aiohttp_request('POST', url, json=json_body) as response:
        try:
            result = await response.json()
            return result['result'] if 'result' in result else result
        except Exception as cte:
            log.exception('Unable to decode response from server: %s', cte)
            return await response.text()
Пример #8
0
    async def _test_range_requests(self):
        """Download stream ``foo`` over HTTP and hand back the payload plus
        its ``Content-Range`` and ``Content-Length`` headers for assertions."""
        name = 'foo'
        url = f'http://{self.daemon.conf.streaming_host}:{self.daemon.conf.streaming_port}/get/{name}'

        async with aiohttp_request('get', url) as resp:
            headers = resp.headers
            self.assertEqual(headers.get('Content-Type'),
                             'application/octet-stream')
            range_header = headers.get('Content-Range')
            declared_length = int(headers.get('Content-Length'))
            body = await resp.content.read()
        # The advertised length must match what was actually streamed.
        self.assertEqual(declared_length, len(body))
        return body, range_header, declared_length
Пример #9
0
 async def get_response(self):
     """Query the exchange-rate endpoint and cache the parsed JSON reply.

     A reply that is not JSON is logged and recorded as an empty dict
     rather than raised, and the cached value is returned either way.
     """
     async with aiohttp_request(
             'get', self.url, params=self.params,
             timeout=self.request_timeout) as response:
         try:
             self._last_response = await response.json()
         except ContentTypeError as e:
             self._last_response = {}
             log.warning(
                 "Could not parse exchange rate response from %s: %s",
                 self.name, e.message)
             log.debug(await response.text())
         return self._last_response
Пример #10
0
 async def _post(self, data: typing.Dict):
     """POST an analytics event to the ``/track`` endpoint, best-effort.

     Any failure is logged at debug level and otherwise ignored so that
     analytics can never break the caller; session cookies returned by
     the server are kept for subsequent requests.
     """
     endpoint = self.url + '/track'
     try:
         async with utils.aiohttp_request(
                 method='POST',
                 url=endpoint,
                 headers={'Connection': 'Close'},
                 auth=aiohttp.BasicAuth(self._write_key, ''),
                 json=data,
                 cookies=self.cookies) as response:
             self.cookies.update(response.cookies)
     except Exception as e:
         log.debug('Encountered an exception while POSTing to %s: ',
                   endpoint,
                   exc_info=e)
Пример #11
0
async def jsonrpc_post(url: str, method: str, **params) -> any:
    """POST a JSON-RPC 2.0 call for *method* to *url*.

    Keyword arguments become the request's ``params`` object. Returns the
    ``result`` member when present, the decoded body otherwise, or the
    raw response text when it is not valid JSON.

    Fixed a typo in the decode-failure log message ("respose").
    """
    json_body = {
        'jsonrpc': '2.0',
        'id': None,
        'method': method,
        'params': params
    }
    headers = {'Content-Type': 'application/json'}
    async with utils.aiohttp_request('POST',
                                     url,
                                     json=json_body,
                                     headers=headers) as response:
        try:
            result = await response.json()
            return result['result'] if 'result' in result else result
        except Exception as cte:
            log.exception('Unable to decode response from server: %s', cte)
            return await response.text()
Пример #12
0
 async def fetch_headers_from_s3(self):
     """Download missing headers from s3, appending them to the local file.

     Resumes from the current local file size via an HTTP Range request,
     validates that the combined final size is header-aligned, then
     streams the body to disk while updating
     ``self._headers_progress_percent``.
     """
     local_header_size = self.local_header_file_size()
     # Ask s3 for only the bytes we do not already have.
     resume_header = {"Range": f"bytes={local_header_size}-"}
     async with utils.aiohttp_request('get', self.HEADERS_URL, headers=resume_header) as response:
         # 406: the Range start is past the end of the remote file.
         if response.status == 406 or response.content_length < self.headers.header_size:  # our file is bigger
             log.warning("s3 is more out of date than we are")
             return
         final_size_after_download = response.content_length + local_header_size
         # The local prefix plus the remote tail must form whole headers.
         if final_size_after_download % self.headers.header_size != 0:
             log.warning("s3 appears to have corrupted header")
             return
         write_mode = "wb"
         if local_header_size > 0:
             log.info("Resuming download of %i bytes from s3", response.content_length)
             # Append instead of truncating when resuming a partial file.
             write_mode = "a+b"
         with open(self.headers_file, write_mode) as fd:
             while not response.content.at_eof():
                 # fd.write returns the byte count, which advances progress.
                 local_header_size += fd.write(await response.content.readany())
                 self._headers_progress_percent = self._round_progress(
                     local_header_size, final_size_after_download
                 )
Пример #13
0
 async def _emit(self, record):
     """Send the formatted log *record* to the remote collector and keep
     the session cookies up to date."""
     encoded = self.format(record).encode()
     async with utils.aiohttp_request(
             'post', self.url, data=encoded,
             cookies=self.cookies) as response:
         self.cookies.update(response.cookies)
Пример #14
0
 async def _make_request(self) -> str:
     """GET ``self.url`` with ``self.params`` and return the body as text."""
     async with aiohttp_request('get', self.url,
                                params=self.params) as response:
         raw = await response.read()
         return raw.decode()