async def fetch_headers_from_s3(self):
    """Download any missing block headers from s3 and append them to the local headers file.

    Sends an HTTP Range request so the download resumes from the current
    local file size, streams the response to disk while updating
    ``self._headers_progress_percent``, then verifies file integrity.
    """
    local_header_size = self.local_header_file_size()
    # Ask s3 only for the bytes we don't already have.
    resume_header = {"Range": f"bytes={local_header_size}-"}
    async with utils.aiohttp_request('get', HEADERS_URL, headers=resume_header) as response:
        # 406 = requested range not satisfiable, i.e. our local file is already
        # at least as big as what s3 has.
        # NOTE(review): if status != 406 and content_length is None, this
        # comparison raises TypeError — confirm aiohttp always reports a
        # length for this endpoint.
        if response.status == 406 or response.content_length < HEADER_SIZE:  # our file is bigger
            log.warning("s3 is more out of date than we are")
            return
        # The payload must be a whole number of fixed-size headers.
        if response.content_length % HEADER_SIZE != 0:
            log.warning("s3 appears to have corrupted header")
            return
        final_size_after_download = response.content_length + local_header_size
        write_mode = "wb"
        if local_header_size > 0:
            log.info("Resuming download of %i bytes from s3", response.content_length)
            write_mode = "a+b"  # append to the partially downloaded file
        with open(self.headers_file, write_mode) as fd:
            while True:
                chunk = await response.content.read(512)
                if not chunk:
                    break
                fd.write(chunk)
                # Progress is measured against the final on-disk size,
                # not just the bytes fetched in this session.
                self._headers_progress_percent = math.ceil(
                    float(fd.tell()) / float(final_size_after_download) * 100)
        log.info(
            "fetched headers from s3, now verifying integrity after download."
        )
        self._check_header_file_integrity()
async def _emit(self, record):
    """Ship a formatted log record to the remote endpoint via HTTP POST."""
    body = self.format(record).encode()
    async with utils.aiohttp_request('post', self.url, data=body, cookies=self.cookies) as response:
        # Keep any session cookies the server hands back for subsequent posts.
        self.cookies.update(response.cookies)
async def test_forbidden(self):
    """The streaming endpoint must answer 403 once streaming_get is disabled."""
    self.data = get_random_bytes(1000)
    await self._setup_stream(self.data, file_size=1000)
    conf = self.daemon.conf
    url = f'http://{conf.streaming_host}:{conf.streaming_port}/get/foo'
    conf.streaming_get = False
    async with aiohttp_request('get', url) as req:
        self.assertEqual(403, req.status)
async def _post(self, endpoint, data):
    """POST json ``data`` to ``endpoint``, forcing the connection closed.

    There is an issue with a timing condition with keep-alive that is best
    explained here: https://github.com/mikem23/keepalive-race

    If you make a request, wait just the right amount of time, then make
    another request, the requests module may opt to reuse the connection,
    but by the time the server gets it the timeout will have expired.

    By forcing the connection to close, we will disable the keep-alive.
    """
    assert endpoint[0] == '/'
    full_url = self.url + endpoint
    req = {
        'method': 'POST',
        'url': full_url,
        'headers': {'Connection': 'Close'},  # disable keep-alive (see docstring)
        'auth': aiohttp.BasicAuth(self._write_key, ''),
        'json': data,
        'cookies': self.cookies,
    }
    try:
        async with utils.aiohttp_request(**req) as response:
            self.cookies.update(response.cookies)
    except Exception as e:
        log.exception('Encountered an exception while POSTing to %s: ', full_url, exc_info=e)
async def get_external_ip():
    """Best-effort lookup of this node's public IP via the lbry.io API.

    Used if upnp is disabled or non-functioning.

    Returns:
        The external IP address as a string, or None if the request fails
        or the API reports an unsuccessful lookup.
    """
    try:
        async with utils.aiohttp_request("get", "https://api.lbry.io/ip") as resp:
            response = await resp.json()
            if response['success']:
                return response['data']['ip']
    except Exception:
        # Deliberately best-effort: any failure (network error, bad JSON,
        # unexpected schema) just means we don't know our external IP.
        pass
    return None
async def _test_range_requests(self):
    """Fetch stream 'foo' over HTTP and return (body, Content-Range, Content-Length)."""
    name = 'foo'
    host = self.daemon.conf.streaming_host
    port = self.daemon.conf.streaming_port
    url = f'http://{host}:{port}/get/{name}'
    async with aiohttp_request('get', url) as req:
        self.assertEqual(req.headers.get('Content-Type'), 'application/octet-stream')
        content_range = req.headers.get('Content-Range')
        content_length = int(req.headers.get('Content-Length'))
        streamed = await req.content.read()
        # The advertised length must match what was actually streamed.
        self.assertEqual(content_length, len(streamed))
        return streamed, content_range, content_length
async def _post(self, data: typing.Dict):
    """POST analytics ``data`` to the /track endpoint, with keep-alive disabled."""
    track_url = self.url + '/track'
    req = {
        'method': 'POST',
        'url': track_url,
        'headers': {'Connection': 'Close'},  # force-close to avoid keep-alive race
        'auth': aiohttp.BasicAuth(self._write_key, ''),
        'json': data,
        'cookies': self.cookies,
    }
    try:
        async with utils.aiohttp_request(**req) as response:
            self.cookies.update(response.cookies)
    except Exception as e:
        # Analytics are non-critical; log quietly and move on.
        log.debug('Encountered an exception while POSTing to %s: ', track_url, exc_info=e)
async def _make_request(self) -> str:
    """GET ``self.url`` with ``self.params`` and return the response body as text."""
    async with aiohttp_request('get', self.url, params=self.params) as response:
        raw = await response.read()
    return raw.decode()