async def proxy(request):
    """Reverse-proxy *request* to TARGET_SERVER_HOST and relay the response.

    Forwards the method, headers, query string and body (rewriting only the
    Host header), accumulates the proxied byte count in the module-level
    BYTES_TOTAL counter, and returns the upstream body to the caller.

    Raises:
        web.HTTPRequestRangeNotSatisfiable: when both the ``range`` query
            parameter and the ``Range`` header are present but disagree.
    """
    target_url = TARGET_SERVER_PROTOCOL + '://' + TARGET_SERVER_HOST
    target_url = urljoin(target_url, request.match_info['path'])
    headers = dict(request.headers)
    # Get range from query params or headers. Renamed from `range` so the
    # local no longer shadows the builtin range().
    range_param = request.query.get('range', '')
    if range_param:
        range_header = request.headers.get('Range', range_param)
        if range_param != range_header:
            raise web.HTTPRequestRangeNotSatisfiable()
        # Propagate the query-supplied range to the upstream request
        # (resolves the old @TODO). This is a no-op when the Range header
        # was already present, since the two values are equal here.
        headers['Range'] = range_header
    data = await request.read()
    get_data = request.rel_url.query
    async with aiohttp.ClientSession() as session:
        headers['Host'] = TARGET_SERVER_HOST
        async with session.request(request.method, target_url,
                                   headers=headers, params=get_data,
                                   data=data) as resp:
            raw = await resp.read()
            # Capture the Content-Type while the response is still open.
            content_type = resp.headers.get('Content-Type')
    # Yes, globals are bad, but let's use here
    global BYTES_TOTAL
    BYTES_TOTAL += len(raw)
    # Only forward Content-Type when the upstream supplied one: the previous
    # code put None into the headers dict, which breaks header serialization.
    response_headers = {'Content-Type': content_type} if content_type else {}
    return web.Response(body=raw, headers=response_headers)
def abort(code):
    """Return an aiohttp HTTP error exception instance for *code*.

    Unknown codes fall back to ``HTTPBadRequest``, matching the original
    if/elif chain's final ``else`` branch.
    """
    # Table-driven dispatch instead of a 26-branch elif chain; maps each
    # supported status code to its aiohttp exception class.
    error_classes = {
        400: web.HTTPBadRequest,
        401: web.HTTPUnauthorized,
        402: web.HTTPPaymentRequired,
        403: web.HTTPForbidden,
        404: web.HTTPNotFound,
        405: web.HTTPMethodNotAllowed,
        406: web.HTTPNotAcceptable,
        407: web.HTTPProxyAuthenticationRequired,
        408: web.HTTPRequestTimeout,
        409: web.HTTPConflict,
        410: web.HTTPGone,
        411: web.HTTPLengthRequired,
        412: web.HTTPPreconditionFailed,
        413: web.HTTPRequestEntityTooLarge,
        414: web.HTTPRequestURITooLong,
        415: web.HTTPUnsupportedMediaType,
        416: web.HTTPRequestRangeNotSatisfiable,
        417: web.HTTPExpectationFailed,
        421: web.HTTPMisdirectedRequest,
        422: web.HTTPUnprocessableEntity,
        424: web.HTTPFailedDependency,
        426: web.HTTPUpgradeRequired,
        428: web.HTTPPreconditionRequired,
        429: web.HTTPTooManyRequests,
        431: web.HTTPRequestHeaderFieldsTooLarge,
        451: web.HTTPUnavailableForLegalReasons,
    }
    # NOTE(review): aiohttp's HTTPMethodNotAllowed normally requires
    # `method` and `allowed_methods` arguments, so code == 405 likely
    # raises TypeError here -- exactly as in the original chain; confirm
    # against the aiohttp version in use before relying on it.
    return error_classes.get(code, web.HTTPBadRequest)()
async def upload_chunk_by_patch(request):
    """Handle a PATCH chunk of a Docker-registry-style blob upload.

    Appends the request body to the session's upload file, updating the
    session's running digest hasher and byte count, and responds 202 with
    the customary Location / Range / Blob-Upload-Session-ID headers.

    Raises:
        exceptions.BlobUploadInvalid: unknown upload session id.
        web.HTTPRequestRangeNotSatisfiable: the Content-Range start does
            not match the number of bytes already stored on disk.
    """
    images_directory = request.app["images_directory"]
    repository = request.match_info["repository"]
    session_id = request.match_info["session_id"]
    # The caller must hold "push" rights on the repository.
    request.app["token_checker"].authenticate(request, repository, ["push"])
    session = request.app["sessions"].get(session_id, None)
    if not session:
        raise exceptions.BlobUploadInvalid(session=session_id)
    uploads = images_directory / "uploads"
    if not uploads.exists():
        os.makedirs(uploads)
    upload_path = uploads / session_id
    content_range = request.headers.get("Content-Range", "")
    if content_range:
        # Bytes already persisted for this session; 0 when nothing yet.
        size = 0
        if os.path.exists(upload_path):
            size = os.path.getsize(upload_path)
        content_range = request.headers["Content-Range"]
        # Content-Range is "<start>-<end>"; only the start is validated.
        left, right = content_range.split("-")
        if int(left) != size:
            # Out-of-order chunk: tell the client what we hold so far.
            raise web.HTTPRequestRangeNotSatisfiable(
                headers={
                    "Location": f"/v2/{repository}/blobs/uploads/{session_id}",
                    "Range": f"0-{size}",
                    "Content-Length": "0",
                    "Blob-Upload-Session-ID": session_id,
                })
    # Stream the body to disk in 1 MiB chunks, feeding the digest as we go.
    async with AIOFile(upload_path, "ab") as fp:
        writer = Writer(fp)
        chunk = await request.content.read(1024 * 1024)
        while chunk:
            await writer(chunk)
            session["hasher"].update(chunk)
            session["size"] += len(chunk)
            chunk = await request.content.read(1024 * 1024)
        # Flush to disk before reporting the new range to the client.
        await fp.fsync()
    # Reported Range is inclusive, hence the -1 (offset of the last byte).
    # NOTE(review): the 416 response above uses the raw size *without* -1;
    # one of the two is presumably off by one -- confirm against the
    # registry chunked-upload protocol before changing either.
    size = os.stat(upload_path).st_size - 1
    return web.Response(
        status=202,
        headers={
            "Location": f"/v2/{repository}/blobs/uploads/{session_id}",
            "Blob-Upload-Session-ID": session_id,
            "Range": f"0-{size}",
        },
    )
def prepare_address(address):
    """Normalize *address* via ``_prepare_address``.

    Any failure in the underlying helper is reported to the client as
    HTTP 416 (the project's convention for bad address input), with the
    original exception chained as the cause for debugging.
    """
    try:
        return _prepare_address(address)
    except Exception as exc:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; chain the cause for diagnostics.
        raise web.HTTPRequestRangeNotSatisfiable() from exc
def detect_address(address):
    """Detect/parse *address* via ``_detect_address``.

    Any failure in the underlying helper is reported to the client as
    HTTP 416 (the project's convention for bad address input), with the
    original exception chained as the cause for debugging.
    """
    try:
        return _detect_address(address)
    except Exception as exc:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; chain the cause for diagnostics.
        raise web.HTTPRequestRangeNotSatisfiable() from exc
async def postschema_middleware(request, handler):
    """Authentication/authorization middleware for postschema views.

    Builds an AuthContext for the request, establishes the session
    context, enforces account status and permissions, dispatches to the
    wrapped *handler* inside the caller's workspace, and handles request
    logging, ``on_response_done`` callbacks, the spec-hash ETag header and
    session-cookie deletion.

    NOTE(review): reconstructed formatting from a whitespace-mangled
    source; statement nesting around the ``async with`` / ``with
    suppress`` blocks is inferred and should be confirmed against the
    original file.
    """
    request.handler = handler
    set_init_logging_context(request)
    # Logout is always served publicly, even when the session is broken.
    if '/actor/logout/' in request.path:
        request.middleware_mode = 'public'
        request.operation = request.method.lower()
        auth_ctxt = AuthContext(request)
        auth_ctxt.request_type = 'public'
        auth_ctxt.ip_address = request.IP
        request.session = auth_ctxt
        try:
            await auth_ctxt.set_session_context()
        except web.HTTPUnauthorized as unauth_exc:
            # Logging out with an invalid session is allowed; keep whatever
            # actor id the exception carries for the audit trail.
            request.session = {}
            request.session['actor_id'] = getattr(unauth_exc, 'actor_id', 'Unrecognized')
        return await handler(request)
    try:
        auth_ctxt = AuthContext(request, **handler._perm_options)
    except AttributeError:
        # e.g 404 -- handler carries no _perm_options; treat as public.
        request.operation = request.method.lower()
        auth_ctxt = AuthContext(request)
        auth_ctxt.request_type = 'public'
        auth_ctxt.ip_address = request.IP
        request.session = auth_ctxt
        await auth_ctxt.set_session_context()
        return await handler(request)
    except TypeError:
        # _perm_options didn't fit AuthContext's signature; standalone
        # role-protected views declare their permissions under 'roles'.
        if 'roles' in handler._perm_options:
            request.operation = request.method.lower()
            auth_ctxt = AuthContext(request)
            auth_ctxt.request_type = 'authed'
            auth_ctxt.ip_address = request.IP
            await auth_ctxt.set_session_context()
            # Status '1' is the active-account marker.
            if str(auth_ctxt.status) != '1':
                raise web.HTTPForbidden(reason='Account inactive')
            request.session = auth_ctxt
            set_logging_context(request.app,
                                id=auth_ctxt['actor_id'],
                                email=auth_ctxt['email'],
                                workspace=auth_ctxt['workspace'])
            auth_ctxt.authorize_standalone(**handler._perm_options)
            # Run the handler inside the caller's workspace scope.
            async with switch_workspace(request):
                resp = await prepare_shielded_response(request, handler)
            # log_request may not exist on this handler; best-effort only.
            with suppress(AttributeError):
                await spawn(request, handler.log_request(request, resp))
            await spawn(
                request, request.app.config.on_response_done(request, resp))
            resp.headers['ETag'] = request.app.spec_hash
            return resp
        raise
    # From here on: view-based operations, which are POST-only and select
    # the operation name via the (repurposed) Range header.
    if request.method != 'POST':
        raise web.HTTPMethodNotAllowed(request.method, allowed_methods=['POST'])
    try:
        op = request.headers['Range']
    except KeyError:
        raise web.HTTPBadRequest(
            reason='`Range` header is required to specify the operation name')
    if op not in ALLOWED_OPERATIONS:
        raise web.HTTPRequestRangeNotSatisfiable(
            reason=f'`{op}` is not a recognized operation name')
    request.operation = op.lower()
    auth_ctxt.set_level_permissions()
    auth_ctxt.ip_address = request.IP
    try:
        await auth_ctxt.set_session_context()
    except web.HTTPException as err_resp:
        # Session setup failed: log / notify, then re-raise the HTTP error.
        resp = err_resp
        with suppress(AttributeError):
            await spawn(request, handler.log_request(request, resp))
        await spawn(request, request.app.config.on_response_done(request, resp))
        raise resp
    if auth_ctxt and str(auth_ctxt.status) != '1':
        resp = web.HTTPForbidden(reason='Account inactive')
        await spawn(request, handler.log_request(request, resp))
        with suppress(AttributeError):
            await spawn(request, request.app.config.on_response_done(request, resp))
        raise resp
    # Enrich the logging context only when a real session exists.
    extra_ctxt = {
        'id': auth_ctxt['actor_id'],
        'email': auth_ctxt['email'],
        'workspace': auth_ctxt['workspace']
    } if auth_ctxt.session_ctxt else {}
    set_logging_context(request.app, **extra_ctxt)
    request.session = auth_ctxt
    async with switch_workspace(request):
        request.auth_conditions = auth_ctxt.authorize()
        resp = None
        try:
            resp = await prepare_shielded_response(request, handler)
        except web.HTTPException as err_resp:
            # HTTP errors from the handler are still logged and returned
            # through the normal post-processing path below.
            resp = err_resp
    with suppress(AttributeError):
        # Login/logout requests are deliberately excluded from logging.
        if request.path not in ['/actor/logout/', '/actor/login/']:
            await spawn(request, handler.log_request(request, resp))
    await spawn(request, request.app.config.on_response_done(request, resp))
    resp.headers['ETag'] = request.app.spec_hash
    if request.session.delete_session_cookie:
        request.app.info_logger.info('Deleting session cookie')
        resp.del_cookie('postsession')
    return resp
timeout=track.stream_settings.hls_part_timeout) and (segment := track.get_segment(int(sequence))))): return web.Response( body=None, status=404, headers={ "Cache-Control": f"max-age={track.target_duration:.0f}" }, ) # If the part is ready or has been hinted, if int(part_num) == len(segment.parts): await track.part_recv( timeout=track.stream_settings.hls_part_timeout) if int(part_num) >= len(segment.parts): return web.HTTPRequestRangeNotSatisfiable( headers={ "Cache-Control": f"max-age={track.target_duration:.0f}", }) return web.Response( body=segment.parts[int(part_num)].data, headers={ "Content-Type": "video/iso.segment", "Cache-Control": f"max-age={6*track.target_duration:.0f}", }, ) class HlsSegmentView(StreamView): """Stream view to serve a HLS fmp4 segment.""" url = r"/api/hls/{token:[a-f0-9]+}/segment/{sequence:\d+}.m4s" name = "api:stream:hls:segment"
# of a segment, there is a small chance it may have arrived before the # segment has been put. If this happens, wait for one part and retry. if not ((segment := track.get_segment(int(sequence))) or (await track.part_recv( timeout=track.stream_settings.hls_part_timeout) and (segment := track.get_segment(int(sequence))))): return web.Response( body=None, status=HTTPStatus.NOT_FOUND, ) # If the part is ready or has been hinted, if int(part_num) == len(segment.parts): await track.part_recv( timeout=track.stream_settings.hls_part_timeout) if int(part_num) >= len(segment.parts): return web.HTTPRequestRangeNotSatisfiable() return web.Response( body=segment.parts[int(part_num)].data, headers={ "Content-Type": "video/iso.segment", }, ) class HlsSegmentView(StreamView): """Stream view to serve a HLS fmp4 segment.""" url = r"/api/hls/{token:[a-f0-9]+}/segment/{sequence:\d+}.m4s" name = "api:stream:hls:segment" cors_allowed = True
async def watch_stream(self: 'webgram.BareServer', request: web.Request) -> web.Response:
    """Stream a Telegram message's file to the HTTP client.

    Resolves the (peer, message-id) pair from the URL hash, honours a
    byte-offset from the ``Range`` header, and relays the file through one
    of two Telegram clients picked at random (crude load balancing).
    Download starts on a BLOCK_SIZE boundary; ``read_skip`` trims the
    first downloaded block back to the exact requested offset.
    """
    if request.match_info.get("h"):
        # Short link: message id only; peer is the configured stats channel.
        hash = self.decode(request.match_info["h"])
        peer = self.config.STATS_CHANNEL
        mid = hash
    elif request.match_info.get("hash"):
        # Long link: "<peer>:<message id>" encoded into one hash.
        hash = self.decode(request.match_info["hash"]).split(":")
        peer = self.to_int_safe(hash[0])
        mid = hash[1]
    else:
        #peer = self.to_int_safe(request.match_info["peer"])
        #mid = request.match_info["mid"]
        return web.Response(
            text=
            "This link is no longer supported, please create a new link")
    if not mid.isdigit() or not await self.validate_peer(peer):
        return web.HTTPNotFound()
    # Randomly pick which client fetches/streams this request.
    rand = random.randint(1, 2)
    if rand == 1:
        message: Message = await self.client.get_messages(peer, ids=int(mid))
    elif rand == 2:
        message: Message = await self.client2.get_messages(peer, ids=int(mid))
    #elif rand == 3:
    #    message: Message = await self.master.get_messages(peer, ids=int(mid))
    if not message or not message.file:
        return web.HTTPNotFound()
    # Offset defaults to the int 0 when no Range header is present; a
    # string value is parsed for the first-byte position.
    offset = request.headers.get("Range", 0)
    if not isinstance(offset, int):
        matches = RANGE_REGEX.search(offset)
        if matches is None:
            return web.HTTPBadRequest()
        offset = matches.group(1)
        if not offset.isdigit():
            return web.HTTPBadRequest()
        offset = int(offset)
    file_size = message.file.size
    # Telegram downloads must start on a block boundary; skip the
    # remainder (read_skip) manually from the first block received.
    download_skip = (offset // BLOCK_SIZE) * BLOCK_SIZE
    read_skip = offset - download_skip
    if request.match_info.get("name"):
        name = request.match_info["name"]
    else:
        name = self.get_file_name(message)
    if download_skip >= file_size:
        return web.HTTPRequestRangeNotSatisfiable()
    # read_skip is offset % BLOCK_SIZE, so this should be unreachable.
    if read_skip > BLOCK_SIZE:
        return web.HTTPInternalServerError()
    # NOTE(review): per RFC 7233 Content-Range's end position is the
    # *inclusive* last byte (file_size - 1), and Content-Length for a 206
    # should be the remaining bytes (file_size - offset), not the full
    # size -- confirm against real client behavior before changing.
    resp = web.StreamResponse(
        headers={
            'Content-Type': message.file.mime_type,  #'application/octet-stream',
            'Accept-Ranges': 'bytes',
            'Content-Range': f'bytes {offset}-{file_size}/{file_size}',
            "Content-Length": str(file_size),
            "Content-Disposition": f'attachment; filename={name}',
        },
        status=206 if offset else 200,
    )
    await resp.prepare(request)
    # Same client that located the message streams its media.
    if rand == 1:
        cls = self.client.iter_download(message.media,
                                        offset=download_skip)
    elif rand == 2:
        cls = self.client2.iter_download(message.media,
                                         offset=download_skip)
    #elif rand == 3:
    #    cls = self.master.iter_download(message.media, offset=download_skip)
    async for part in cls:
        if len(part) < read_skip:
            # Whole part is before the requested offset; drop it.
            read_skip -= len(part)
        elif read_skip:
            # Part straddles the offset; emit only the tail.
            await resp.write(part[read_skip:])
            read_skip = 0
        else:
            await resp.write(part)
    return resp