async def get(self) -> StreamResponse:
    """Stream a stored file back to the client, looked up by its hash.

    Request
    -------
    <file_hash> str: should contain only numbers and letters

    Response
    --------
    Chunked streaming of the raw file bytes, served as an attachment.

    Raises
    ------
    ValidationError: when the file_hash path segment is empty.
    HTTPNotFound: when no file with that hash exists in storage.
    """
    file_hash = self.request.match_info['file_hash'].lower()
    if not file_hash:
        raise ValidationError(message='file_hash is empty')

    file_manager = self._create_file_manager()
    try:
        # Resolve the reader BEFORE preparing the response: once headers
        # are sent a 404 can no longer be delivered, so the lookup (the
        # only place FileNotFoundError is expected) must happen first.
        file_reader = await file_manager.get_file_reader(file_hash)
    except FileNotFoundError as exc:
        raise HTTPNotFound() from exc

    response = StreamResponse(status=HTTPStatus.OK, headers={
        'Content-disposition': f'attachment; filename={file_hash}'
    })
    response.enable_chunked_encoding()
    await response.prepare(self.request)
    async for chunk in file_reader():
        await response.write(chunk)
    await response.write_eof()
    return response
async def export_plumbing(request, sql, *sql_args, filename, none_message, modify_records=None):
    """Stream the rows of *sql* to the client as a CSV attachment.

    :param request: aiohttp request; must carry an open DB connection at
        ``request['conn']``
    :param sql: query whose rows become CSV lines
    :param sql_args: positional arguments for the query
    :param filename: download name (without extension) used in the
        Content-Disposition header and in error logs
    :param none_message: single-column message written when the query
        yields no rows
    :param modify_records: optional callable applied to each record
        before it is written
    """
    response = StreamResponse(headers={
        # Bug fix: the header previously hard-coded a placeholder name
        # instead of interpolating the *filename* argument.
        'Content-Disposition': f'attachment;filename={filename}.csv'
    })
    response.content_type = 'text/csv'
    await response.prepare(request)
    try:
        response_file = ResponsePseudoFile(response)
        writer = None
        async with request['conn'].transaction():
            async for record in request['conn'].cursor(sql, *sql_args):
                data = modify_records(record) if modify_records else record
                if writer is None:
                    # Created lazily so the CSV header can be derived
                    # from the first record's keys.
                    writer = DictWriter(response_file, fieldnames=list(data.keys()))
                    writer.writeheader()
                writer.writerow({k: '' if v is None else str(v) for k, v in data.items()})
                await response_file.write_response()
        if writer is None:
            # No rows at all: emit a one-column CSV carrying *none_message*.
            writer = DictWriter(response_file, fieldnames=['message'])
            writer.writeheader()
            writer.writerow({'message': none_message})
            await response_file.write_response()
    except Exception:  # pragma no cover
        # Headers are already sent; all we can do is log and end the stream.
        logger.exception('error generating export, filename: %s', filename)
    return response
async def ndjsonify(async_iterator, request):
    """Stream *async_iterator* to the client as newline-delimited JSON.

    Each element is serialized with json_dumps, terminated with a
    newline, and written UTF-8 encoded as soon as it is produced.
    """
    response = StreamResponse(status=200, reason='OK')
    response.headers['Content-Type'] = 'application/x-ndjson'
    await response.prepare(request)
    async for item in async_iterator:
        line = json_dumps(item) + '\n'
        await response.write(line.encode('utf-8'))
    return response
async def on_response_prepare(request: Request, response: StreamResponse) -> None:
    """Attach cross-origin isolation and CORS headers to outgoing responses.

    The UI relies on SharedArrayBuffer, which browsers only enable for
    cross-origin-isolated pages, hence the COOP/COEP pair on /ui/ paths:
    https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer
    """
    if fullmatch("/ui/.*", request.path, RegexFlag.IGNORECASE):
        response.headers.update({
            "Cross-Origin-Opener-Policy": "same-origin",
            "Cross-Origin-Embedder-Policy": "require-corp",
        })
    # For CORS requests, echo the caller's origin (fall back to "*").
    if request.headers.get("sec-fetch-mode") == "cors":
        allowed_origin = request.headers.get("origin", "*")
        response.headers["Access-Control-Allow-Origin"] = allowed_origin
async def get_recipe_image(req: Request):
    """Stream the stored image of the recipe identified by the ``id`` match-info.

    Returns the recipe-error payload when the id is unknown, a 404
    response when the recipe has no image attached, and otherwise
    streams the raw image bytes with the stored content type.
    """
    try:
        r = recipe.get_by_id(req.match_info.get("id"))
    except RecipeError:
        return json_response(RECIPE_ERRORS.DOES_NOT_EXIST)
    try:
        resp = StreamResponse(
            status=200,
            reason='OK',
            headers={'Content-Type': r._recipe.image.content_type})
        await resp.prepare(req)
        size = 0
        out = r._recipe.image.get()
        # NOTE(review): readchunk() looks like a blocking read inside an
        # async handler — consider offloading to an executor; confirm.
        while True:
            chunk = out.readchunk()
            if not chunk:
                break
            size += len(chunk)
            await resp.write(chunk)
        # Previously logged the bare int as the message; give it context.
        _LOGGER.info('streamed recipe image, %d bytes', size)
        await resp.write_eof()
        return resp
    except AttributeError:
        # Recipe exists but has no image attached.
        return Response(status=404)
async def sitemap(request):
    """Stream an XML sitemap for the current company's event pages.

    The document is written incrementally; on a database error the
    partial document is still closed with </urlset> so the client
    always receives well-formed XML.
    """
    response = StreamResponse()
    response.content_type = 'application/xml'
    await response.prepare(request)
    # Emit the prologue before touching the database, so even a failing
    # query leaves the client with a syntactically valid document.
    await response.write(
        b'<?xml version="1.0" encoding="UTF-8"?>\n'
        b'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n')
    try:
        company_domain = await request['conn'].fetchval(
            'SELECT domain FROM companies WHERE id=$1', request['company_id'])

        async def write_url(uri_, latest_update_, priority):
            # One <url> entry; priority rendered with one decimal place.
            await response.write(
                (f'<url>'
                 f'<loc>https://{company_domain}/{uri_}</loc>'
                 f'<lastmod>{latest_update_:%Y-%m-%d}</lastmod>'
                 f'<changefreq>daily</changefreq>'
                 f'<priority>{priority:0.1f}</priority>'
                 f'</url>\n').encode())

        # cat_uri -> most recent event update seen within that category.
        cats = {}
        async with request['conn'].transaction():
            async for (
                    cat_uri, uri, latest_update,
                    highlight) in request['conn'].cursor(sitemap_events_sql):
                cat_latest_update = cats.get(cat_uri)
                if cat_latest_update is None or latest_update > cat_latest_update:
                    cats[cat_uri] = latest_update
                # Highlighted events get a higher sitemap priority.
                await write_url(uri, latest_update, 0.7 if highlight else 0.5)

        # Category pages inherit the newest update date of their events.
        for cat_uri, latest_update in cats.items():
            await write_url(cat_uri, latest_update, 0.9)

        # Home page: newest date overall, or today when there are no events.
        if cats:
            await write_url('', max(cats.values()), 1)
        else:
            await write_url('', date.today(), 1)
    except Exception:  # pragma no cover
        # Headers are already sent; log and fall through to close the doc.
        logger.exception('error generating sitemap')
    await response.write(b'</urlset>\n')
    return response
async def request_server(self, request: Request):
    """Forward *request* over the control websocket and await the reply.

    The request is wrapped with a fresh GUID and sent to the connected
    websocket peer; the matching StreamResponse is parked in
    self.responses, presumably so the websocket side can write to it and
    then resolve the Future in self.responses_finalized (the write path
    is not visible here — verify against the websocket handler).

    Returns a 502 response when no websocket is connected. A timeout on
    the finalization future propagates asyncio.TimeoutError to the caller.
    """
    guid = str(uuid.uuid4())
    wrapped_request = await WrappedRequest.from_request(guid, request)
    if self.ws is None:
        # No peer connected; nothing to forward to.
        return web.Response(status=502, text='502 - Service not available')
    try:
        # Register the pending response before sending, so the websocket
        # side can find it as soon as the reply arrives.
        self.responses[guid] = response = StreamResponse()
        response.original_request = request
        self.responses_finalized[guid] = response_finalized = Future()
        await self.ws.send_json(dataclasses.asdict(wrapped_request))
        # Block until the websocket side marks the response as complete.
        # NOTE(review): asyncio.wait_for expects SECONDS; the _MS suffix
        # suggests milliseconds — verify the constant's unit.
        await asyncio.wait_for(response_finalized, timeout=RESPONSE_TIMEOUT_MS)
        await response.write_eof()
        return response
    finally:
        # Always drop the bookkeeping entries, including on timeout/error.
        if guid in self.responses:
            del self.responses[guid]
        if guid in self.responses_finalized:
            del self.responses_finalized[guid]
async def impl(request: Request, handler: Handler) -> StreamResponse:
    """CORS middleware: decorate responses with permissive CORS headers.

    Preflight OPTIONS requests are answered directly with an empty
    response; everything else is delegated to *handler* first. Requests
    without an Origin header pass through untouched.
    """
    is_options = request.method == "OPTIONS"
    is_preflight = is_options and "Access-Control-Request-Method" in request.headers
    resp = StreamResponse() if is_preflight else await handler(request)
    origin = request.headers.get("Origin")
    if not origin:
        # Same-origin request: nothing to decorate.
        return resp
    resp.headers["Access-Control-Allow-Origin"] = "*"
    resp.headers["Access-Control-Expose-Headers"] = "*"
    if is_options:
        resp.headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers)
        resp.headers["Access-Control-Allow-Methods"] = ", ".join(("OPTIONS", "POST"))
    return resp
async def _stream_handler(self, request: Request) -> typing.Optional[Response]:
    """Stream a Telegram document over HTTP, honouring Range requests.

    Route params: message_id and token, both numeric strings. Returns an
    error Response on validation failure, otherwise streams via a
    StreamResponse and returns None implicitly (hence Optional[Response]).
    """
    message_id: str = request.match_info["message_id"]
    if not message_id.isdigit():
        return Response(status=401)
    token: str = request.match_info["token"]
    if not token.isdigit():
        return Response(status=401)
    if not self._check_token(int(message_id), int(token)):
        return Response(status=403)
    range_header = request.headers.get("Range")
    if range_header is None:
        # No Range header: serve the whole document from the start.
        offset = 0
        data_to_skip = False
        max_size = None
    else:
        try:
            offset, data_to_skip, max_size = parse_http_range(
                range_header, self._config.block_size)
        except ValueError:
            return Response(status=400)
    # data_to_skip is a sub-block remainder; it can never legitimately
    # exceed one block.
    if data_to_skip > self._config.block_size:
        return Response(status=500)
    try:
        message = await self._mtproto.get_message(int(message_id))
    except ValueError:
        return Response(status=404)
    if not isinstance(message.media, MessageMediaDocument):
        return Response(status=404)
    if not isinstance(message.media.document, Document):
        return Response(status=404)
    size = message.media.document.size
    # First byte actually delivered to the client.
    read_after = offset + data_to_skip
    if read_after > size:
        return Response(status=400)
    if (max_size is not None) and (size < max_size):
        return Response(status=400)
    if max_size is None:
        max_size = size
    # 206 Partial Content whenever the client asked for less than the
    # whole file.
    stream = StreamResponse(status=206 if (read_after or (max_size != size)) else 200)
    self._write_upnp_headers(stream)
    # NOTE(review): Content-Range's end position should be the inclusive
    # last byte (max_size - 1 per RFC 7233) — verify max_size semantics.
    stream.headers.setdefault("Content-Range", f"bytes {read_after}-{max_size}/{size}")
    stream.headers.setdefault("Accept-Ranges", "bytes")
    # NOTE(review): Content-Length is set to the FULL document size even
    # for partial (206) responses — looks inconsistent; confirm.
    stream.headers.setdefault("Content-Length", str(size))
    try:
        filename = mtproto_filename(message)
    except TypeError:
        # No usable filename in the message; synthesize one from the id.
        filename = f"file_{message.media.document.id}"
    self._write_filename_header(stream, filename)
    await stream.prepare(request)
    while offset < max_size:
        block = await self._mtproto.get_block(message, offset, self._config.block_size)
        new_offset = offset + len(block)
        if data_to_skip:
            # Drop the sub-block prefix only once, on the first block.
            block = block[data_to_skip:]
            data_to_skip = False
        if new_offset > max_size:
            # Trim the final block to the requested upper bound.
            block = block[:-(new_offset - max_size)]
        offset = new_offset
        if request.transport.is_closing():
            # Client went away; stop fetching blocks.
            break
        await stream.write(block)
    await stream.write_eof()
async def _stream_handler(self, request: Request) -> typing.Optional[Response]:
    """Stream a Telegram document over HTTP with Range support and
    per-stream accounting (timeouts, transports, downloaded blocks).

    Route params: message_id and token, both numeric strings. Returns an
    error Response on validation failure, otherwise streams via a
    StreamResponse and returns None implicitly (hence Optional[Response]).
    """
    _message_id: str = request.match_info["message_id"]
    if not _message_id.isdigit():
        return Response(status=401)
    _token: str = request.match_info["token"]
    if not _token.isdigit():
        return Response(status=401)
    # Convert once, then delete the raw strings so only the validated
    # int forms can be used below.
    token = int(_token)
    del _token
    message_id = int(_message_id)
    del _message_id
    local_token = serialize_token(message_id, token)
    if not self._check_local_token(local_token):
        return Response(status=403)
    range_header = request.headers.get("Range")
    if range_header is None:
        # No Range header: serve the whole document from the start.
        offset = 0
        data_to_skip = False
        max_size = None
    else:
        try:
            offset, data_to_skip, max_size = parse_http_range(
                range_header, self._config.block_size)
        except ValueError:
            return Response(status=400)
    # data_to_skip is a sub-block remainder; it can never legitimately
    # exceed one block.
    if data_to_skip > self._config.block_size:
        return Response(status=500)
    try:
        message = await self._mtproto.get_message(int(message_id))
    except ValueError:
        return Response(status=404)
    if not isinstance(message.media, MessageMediaDocument):
        return Response(status=404)
    if not isinstance(message.media.document, Document):
        return Response(status=404)
    size = message.media.document.size
    # First byte actually delivered to the client.
    read_after = offset + data_to_skip
    if read_after > size:
        return Response(status=400)
    if (max_size is not None) and (size < max_size):
        return Response(status=400)
    if max_size is None:
        max_size = size
    # 206 Partial Content whenever the client asked for less than the
    # whole file.
    stream = StreamResponse(
        status=206 if (read_after or (max_size != size)) else 200)
    self._write_http_range_headers(stream, read_after, size, max_size)
    try:
        filename = mtproto_filename(message)
    except TypeError:
        # No usable filename in the message; synthesize one from the id.
        filename = f"file_{message.media.document.id}"
    self._write_filename_header(stream, filename)
    self._write_upnp_headers(stream)
    await stream.prepare(request)
    while offset < max_size:
        # Refresh the stream's inactivity timeout on every block.
        self._feed_timeout(message_id, message.from_id, local_token, size)
        block = await self._mtproto.get_block(message, offset, self._config.block_size)
        new_offset = offset + len(block)
        if data_to_skip:
            # Drop the sub-block prefix only once, on the first block.
            block = block[data_to_skip:]
            data_to_skip = False
        if new_offset > max_size:
            # Trim the final block to the requested upper bound.
            block = block[:-(new_offset - max_size)]
        if request.transport is None:
            # Connection already torn down; stop fetching blocks.
            break
        # NOTE(review): helper name contains a typo ("trasport");
        # defined elsewhere, so it cannot be renamed from this block.
        self._feed_stream_trasport(local_token, request.transport)
        if request.transport.is_closing():
            break
        await stream.write(block)
        self._feed_downloaded_blocks(offset, local_token)
        offset = new_offset
    await stream.write_eof()
def enable_compression(request: Request, response: StreamResponse) -> None:
    """Turn on response compression unless the request came via the UI.

    The UI cannot handle compressed responses, so compression is only
    enabled when the 'resotoui-via' marker header is absent.
    """
    via_ui = "resotoui-via" in request.headers
    if not via_ui:
        response.enable_compression()