def test_drop_content_length_header_on_setting_len_to_None():
    resp = StreamResponse()
    resp.content_length = 1
    assert "1" == resp.headers['Content-Length']
    resp.content_length = None
    assert 'Content-Length' not in resp.headers
def test_set_content_length_to_None_on_non_set():
    resp = StreamResponse()
    resp.content_length = None
    assert 'Content-Length' not in resp.headers
    resp.content_length = None
    assert 'Content-Length' not in resp.headers
def test_drop_content_length_header_on_setting_len_to_None(self):
    resp = StreamResponse()
    resp.content_length = 1
    self.assertEqual("1", resp.headers['Content-Length'])
    resp.content_length = None
    self.assertNotIn('Content-Length', resp.headers)
def test_set_content_length_to_None_on_non_set(self):
    resp = StreamResponse()
    resp.content_length = None
    self.assertNotIn('Content-Length', resp.headers)
    resp.content_length = None
    self.assertNotIn('Content-Length', resp.headers)
def test_set_content_length_to_None_on_non_set(self):
    req = self.make_request('GET', '/')
    resp = StreamResponse(req)
    resp.content_length = None
    self.assertNotIn('Content-Length', resp.headers)
    resp.content_length = None
    self.assertNotIn('Content-Length', resp.headers)
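Taken together, these tests fix the setter's round-trip contract: assigning an int materializes a Content-Length header, and assigning None removes it (or is a no-op if it was never set). A minimal standalone sketch of the same contract, assuming only that aiohttp's StreamResponse is importable:

from aiohttp.web import StreamResponse

# Mirrors the assertions above: an int assignment creates the header,
# a None assignment drops it again.
resp = StreamResponse()
resp.content_length = 1
assert resp.headers['Content-Length'] == '1'
resp.content_length = None
assert 'Content-Length' not in resp.headers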
async def prepare_download(self, disposition=None, filename=None,
                           content_type=None, size=None, **kwargs):
    if disposition is None:
        disposition = self.request.query.get('disposition', 'attachment')
    try:
        file = self.field.get(self.field.context or self.context)
    except AttributeError:
        file = None
    if file is None and filename is None:
        raise HTTPNotFound(content={
            'message': 'File or custom filename required to download'
        })
    cors_renderer = app_settings['cors_renderer'](self.request)
    headers = await cors_renderer.get_headers()
    headers.update({
        'Content-Disposition': '{}; filename="{}"'.format(
            disposition, filename or file.filename)
    })
    download_resp = StreamResponse(headers=headers)
    download_resp.content_type = content_type or file.guess_content_type()
    if size or file.size:
        download_resp.content_length = size or file.size
    await download_resp.prepare(self.request)
    return download_resp
async def hello(request):
    resp = StreamResponse()
    name = request.match_info.get('name', 'Anonymous')
    answer = ('Hello, ' + name).encode('utf8')
    resp.content_length = len(answer)
    await resp.prepare(request)
    await resp.write(answer)  # write() is a coroutine and must be awaited
    await resp.write_eof()
    return resp
def hello(request):
    # old-style (pre-async/await) aiohttp handler
    resp = StreamResponse(request)
    name = request.match_info.get('name', 'Anonymous')
    answer = ('Hello, ' + name).encode('utf8')
    resp.content_length = len(answer)
    resp.send_headers()
    resp.write(answer)
    yield from resp.write_eof()
    return resp
def intro(request):
    txt = textwrap.dedent("""\
        Type {url}/hello/John  {url}/simple  or  {url}/change_body
        in browser url bar
        """).format(url='127.0.0.1:8080')
    binary = txt.encode('utf8')
    resp = StreamResponse(request)
    resp.content_length = len(binary)
    resp.send_headers()  # headers must be sent before the body is written
    resp.write(binary)
    return resp
async def test_remove_content_length_if_compression_enabled_http11():
    writer = mock.Mock()

    async def write_headers(status_line, headers):
        assert hdrs.CONTENT_LENGTH not in headers
        assert headers.get(hdrs.TRANSFER_ENCODING, '') == 'chunked'

    writer.write_headers.side_effect = write_headers
    req = make_request('GET', '/', writer=writer)
    resp = StreamResponse()
    resp.content_length = 123
    resp.enable_compression(ContentCoding.gzip)
    await resp.prepare(req)
    assert resp.content_length is None
async def test_force_compression_identity():
    writer = mock.Mock()

    async def write_headers(status_line, headers):
        assert hdrs.CONTENT_LENGTH in headers
        assert hdrs.TRANSFER_ENCODING not in headers

    writer.write_headers.side_effect = write_headers
    req = make_request('GET', '/', writer=writer)
    resp = StreamResponse()
    resp.content_length = 123
    resp.enable_compression(ContentCoding.identity)
    await resp.prepare(req)
    assert resp.content_length == 123
async def __call__(self):
    if hasattr(self.context, 'file_path'):
        filepath = str(self.context.file_path.absolute())
        filename = self.context.file_path.name
        with open(filepath, 'rb') as f:
            resp = StreamResponse(headers={
                'CONTENT-DISPOSITION':
                    'attachment; filename="%s"' % filename
            })
            # guess_type() returns a (type, encoding) tuple; use the type
            resp.content_type = mimetypes.guess_type(filename)[0]
            data = f.read()
            resp.content_length = len(data)
            await resp.prepare(self.request)
            await resp.write(data)  # write() is a coroutine and must be awaited
            return resp
def test_remove_content_length_if_compression_enabled_http10():
    writer = mock.Mock()

    def write_headers(status_line, headers):
        assert hdrs.CONTENT_LENGTH not in headers
        assert hdrs.TRANSFER_ENCODING not in headers

    writer.write_headers.side_effect = write_headers
    req = make_request('GET', '/', version=HttpVersion10,
                       payload_writer=writer)
    resp = StreamResponse()
    resp.content_length = 123
    resp.enable_compression(ContentCoding.gzip)
    yield from resp.prepare(req)
    assert resp.content_length is None
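The three compression tests above encode one rule: enabling a real content coding such as gzip invalidates any declared Content-Length, so aiohttp drops it and (on HTTP/1.1 only) switches to chunked transfer, while the identity coding leaves the length intact. A hedged handler-level sketch of the same behavior; the handler name and route are illustrative, not taken from the tests:

from aiohttp import web

async def compressed(request: web.Request) -> web.StreamResponse:
    resp = web.StreamResponse()
    resp.content_length = 123                        # discarded once gzip is enabled
    resp.enable_compression(web.ContentCoding.gzip)  # force gzip regardless of Accept-Encoding
    await resp.prepare(request)                      # resp.content_length is now None
    await resp.write(b'x' * 123)
    await resp.write_eof()
    return resp

app = web.Application()
app.router.add_get('/compressed', compressed)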
async def POST_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    put_points = False
    num_points = 0
    if "count" in params:
        num_points = int(params["count"])
    if "action" in params and params["action"] == "put":
        log.info("POST Chunk put points, num_points: {}".format(num_points))
        put_points = True
    else:
        log.info("POST Chunk get points")
    s3path = None
    s3offset = 0
    s3size = 0
    if "s3path" in params:
        if put_points:
            log.error("s3path can not be used with put points POST request")
            raise HTTPBadRequest()
        s3path = params["s3path"]
        log.debug(f"GET_Chunk - using s3path: {s3path}")
        bucket = None
    elif "bucket" in params:
        bucket = params["bucket"]
    else:
        bucket = None
    if "s3offset" in params:
        try:
            s3offset = int(params["s3offset"])
        except ValueError:
            log.error(f"invalid s3offset params: {params['s3offset']}")
            raise HTTPBadRequest()
    if "s3size" in params:
        try:
            s3size = int(params["s3size"])
        except ValueError:
            log.error(f"invalid s3size params: {params['s3size']}")
            raise HTTPBadRequest()
    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    log.info(f"POST chunk_id: {chunk_id}")
    chunk_index = getChunkIndex(chunk_id)
    log.debug(f"chunk_index: {chunk_index}")
    if not isValidUuid(chunk_id, "Chunk"):
        msg = f"Invalid chunk id: {chunk_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    validateInPartition(app, chunk_id)
    log.debug("request params: {}".format(list(params.keys())))
    if "dset" in params:
        msg = "Unexpected dset in POST request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    dset_id = getDatasetId(chunk_id)
    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    log.debug(f"dset_json: {dset_json}")
    chunk_layout = getChunkLayout(dset_json)
    chunk_coord = getChunkCoordinate(chunk_id, chunk_layout)
    log.debug(f"chunk_coord: {chunk_coord}")
    if not request.has_body:
        msg = "POST Value with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    content_type = "application/octet-stream"
    if "Content-Type" in request.headers:
        # client should use "application/octet-stream" for binary transfer
        content_type = request.headers["Content-Type"]
    if content_type != "application/octet-stream":
        msg = "Unexpected content_type: {}".format(content_type)
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    type_json = dset_json["type"]
    dset_dtype = createDataType(type_json)
    log.debug(f"dtype: {dset_dtype}")
    dims = getChunkLayout(dset_json)
    log.debug(f"got dims: {dims}")
    rank = len(dims)
    if rank == 0:
        msg = "POST chunk request with no dimensions"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    # create a numpy array for incoming points
    input_bytes = await request_read(request)
    if len(input_bytes) != request.content_length:
        msg = "Read {} bytes, expecting: {}".format(
            len(input_bytes), request.content_length)
        log.error(msg)
        raise HTTPInternalServerError()
    # get chunk from cache/s3.  If not found, init a new chunk if this is a write request
    chunk_arr = await getChunk(app, chunk_id, dset_json, bucket=bucket,
                               s3path=s3path, s3offset=s3offset,
                               s3size=s3size, chunk_init=put_points)
    if chunk_arr is None:
        if put_points:
            log.error("no array returned for put_points")
            raise HTTPInternalServerError()
        else:
            # get points on a non-existent S3 object?
            log.warn("S3 object not found for get points")
            raise HTTPNotFound()
    if put_points:
        # writing point data
        # create a numpy array with the following type:
        #   (coord1, coord2, ...) | dset_dtype
        if rank == 1:
            coord_type_str = "uint64"
        else:
            coord_type_str = "({},)uint64".format(rank)
        comp_dtype = np.dtype([("coord", np.dtype(coord_type_str)),
                               ("value", dset_dtype)])
        # np.frombuffer replaces the deprecated np.fromstring
        point_arr = np.frombuffer(input_bytes, dtype=comp_dtype)
        if len(point_arr) != num_points:
            msg = "Unexpected size of point array, got: {} expected: {}".format(
                len(point_arr), num_points)
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        for i in range(num_points):
            elem = point_arr[i]
            if rank == 1:
                coord = int(elem[0])
            else:
                coord = tuple(elem[0])  # index to update
            val = elem[1]  # value
            chunk_arr[coord] = val  # update the point
        chunk_cache = app["chunk_cache"]
        chunk_cache.setDirty(chunk_id)
        # async write to S3
        dirty_ids = app["dirty_ids"]
        now = int(time.time())
        dirty_ids[chunk_id] = (now, bucket)
        log.info(f"set {chunk_id} to dirty")
    else:
        # reading point data
        point_dt = np.dtype('uint64')  # use unsigned long for point index
        # read points as unsigned longs (frombuffer replaces deprecated fromstring)
        point_arr = np.frombuffer(input_bytes, dtype=point_dt)
        if len(point_arr) % rank != 0:
            msg = "Unexpected size of point array"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        num_points = len(point_arr) // rank
        log.debug(f"got {num_points} points")
        point_arr = point_arr.reshape((num_points, rank))
        output_arr = np.zeros((num_points,), dtype=dset_dtype)
        for i in range(num_points):
            point = point_arr[i, :]
            tr_point = getChunkRelativePoint(chunk_coord, point)
            val = chunk_arr[tuple(tr_point)]
            output_arr[i] = val
    if put_points:
        # write empty response
        resp = json_response({})
    else:
        # get data
        output_data = output_arr.tobytes()
        # write response
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(output_data)
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()
    return resp
def test_content_length_setter():
    resp = StreamResponse()
    resp.content_length = 234
    assert 234 == resp.content_length
def test_enable_chunked_encoding_with_content_length():
    resp = StreamResponse()
    resp.content_length = 234
    with pytest.raises(RuntimeError):
        resp.enable_chunked_encoding()
def test_content_length_setter_with_enable_chunked_encoding() -> None:
    resp = StreamResponse()
    resp.enable_chunked_encoding()
    with pytest.raises(RuntimeError):
        resp.content_length = 234
def test_content_length_setter(self):
    req = self.make_request('GET', '/')
    resp = StreamResponse(req)
    resp.content_length = 234
    self.assertEqual(234, resp.content_length)
async def GET_AttributeValue(request):
    """HTTP method to return an attribute value"""
    log.request(request)
    app = request.app
    log.info("GET_AttributeValue")
    collection = getRequestCollectionName(request)  # returns datasets|groups|datatypes
    obj_id = request.match_info.get('id')
    if not obj_id:
        msg = "Missing object id"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    if not isValidUuid(obj_id, obj_class=collection):
        msg = f"Invalid object id: {obj_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    attr_name = request.match_info.get('name')
    validateAttributeName(attr_name)
    username, pswd = getUserPasswordFromRequest(request)
    if username is None and app['allow_noauth']:
        username = "******"
    else:
        await validateUserPassword(app, username, pswd)
    domain = getDomainFromRequest(request)
    if not isValidDomain(domain):
        msg = f"Invalid domain value: {domain}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    bucket = getBucketForDomain(domain)

    # get domain JSON
    domain_json = await getDomainJson(app, domain)
    if "root" not in domain_json:
        log.error(f"Expected root key for domain: {domain}")
        raise HTTPBadRequest(reason="Unexpected Error")

    # TBD - verify that the obj_id belongs to the given domain
    await validateAction(app, domain, obj_id, username, "read")

    req = getDataNodeUrl(app, obj_id)
    req += '/' + collection + '/' + obj_id + "/attributes/" + attr_name
    log.debug("get Attribute: " + req)
    params = {}
    if bucket:
        params["bucket"] = bucket
    dn_json = await http_get(app, req, params=params)
    log.debug("got attributes json from dn for obj_id: " + str(dn_json))

    attr_shape = dn_json["shape"]
    log.debug(f"attribute shape: {attr_shape}")
    if attr_shape["class"] == 'H5S_NULL':
        msg = "Null space attributes can not be read"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    accept_type = getAcceptType(request)
    response_type = accept_type  # will adjust later if binary not possible
    type_json = dn_json["type"]
    shape_json = dn_json["shape"]
    item_size = getItemSize(type_json)
    if item_size == 'H5T_VARIABLE' and accept_type != "json":
        msg = "Client requested binary, but only JSON is supported for variable length data types"
        log.info(msg)
        response_type = "json"

    if response_type == "binary":
        arr_dtype = createDataType(type_json)  # np datatype
        np_shape = getShapeDims(shape_json)
        try:
            arr = jsonToArray(np_shape, arr_dtype, dn_json["value"])
        except ValueError:
            msg = "Bad Request: input data doesn't match selection"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        output_data = arr.tobytes()
        log.debug(
            f"GET AttributeValue - returning {len(output_data)} bytes binary data"
        )
        cors_domain = config.get("cors_domain")
        # write response
        try:
            resp = StreamResponse()
            resp.content_type = "application/octet-stream"
            resp.content_length = len(output_data)
            # allow CORS
            if cors_domain:
                resp.headers['Access-Control-Allow-Origin'] = cors_domain
                resp.headers['Access-Control-Allow-Methods'] = \
                    "GET, POST, DELETE, PUT, OPTIONS"
                resp.headers['Access-Control-Allow-Headers'] = \
                    "Content-Type, api_key, Authorization"
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Got exception: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()
    else:
        resp_json = {}
        if "value" in dn_json:
            resp_json["value"] = dn_json["value"]
        hrefs = []
        obj_uri = '/' + collection + '/' + obj_id
        attr_uri = obj_uri + '/attributes/' + attr_name
        hrefs.append({'rel': 'self', 'href': getHref(request, attr_uri)})
        hrefs.append({'rel': 'home', 'href': getHref(request, '/')})
        hrefs.append({'rel': 'owner', 'href': getHref(request, obj_uri)})
        resp_json["hrefs"] = hrefs
        resp = await jsonResponse(request, resp_json)
    log.response(request, resp=resp)
    return resp
async def POST_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    put_points = False
    if "count" not in params:
        log.warn("expected count param")
        raise HTTPBadRequest()
    num_points = int(params["count"])  # guaranteed present after the guard above
    if "action" in params and params["action"] == "put":
        log.info(f"POST Chunk put points - num_points: {num_points}")
        put_points = True
    else:
        log.info(f"POST Chunk get points - num_points: {num_points}")
    s3path = None
    s3offset = 0
    s3size = 0
    if "s3path" in params:
        if put_points:
            log.error("s3path can not be used with put points POST request")
            raise HTTPBadRequest()
        s3path = params["s3path"]
        log.debug(f"GET_Chunk - using s3path: {s3path}")
        bucket = None
    elif "bucket" in params:
        bucket = params["bucket"]
    else:
        bucket = None
    if "s3offset" in params:
        try:
            s3offset = int(params["s3offset"])
        except ValueError:
            log.error(f"invalid s3offset params: {params['s3offset']}")
            raise HTTPBadRequest()
    if "s3size" in params:
        try:
            s3size = int(params["s3size"])
        except ValueError:
            log.error(f"invalid s3size params: {params['s3size']}")
            raise HTTPBadRequest()
    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    log.info(f"POST chunk_id: {chunk_id}")
    chunk_index = getChunkIndex(chunk_id)
    log.debug(f"chunk_index: {chunk_index}")
    if not isValidUuid(chunk_id, "Chunk"):
        msg = f"Invalid chunk id: {chunk_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    try:
        validateInPartition(app, chunk_id)
    except KeyError:
        msg = f"invalid partition for obj id: {chunk_id}"
        log.error(msg)
        raise HTTPInternalServerError()
    log.debug(f"request params: {list(params.keys())}")
    if "dset" in params:
        msg = "Unexpected dset in POST request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    if not request.has_body:
        msg = "POST Value with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    content_type = "application/octet-stream"
    if "Content-Type" in request.headers:
        # client should use "application/octet-stream" for binary transfer
        content_type = request.headers["Content-Type"]
    if content_type != "application/octet-stream":
        msg = f"Unexpected content_type: {content_type}"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    dset_id = getDatasetId(chunk_id)
    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    dims = getChunkLayout(dset_json)
    rank = len(dims)
    type_json = dset_json["type"]
    dset_dtype = createDataType(type_json)
    # create a numpy array for incoming points
    input_bytes = await request_read(request)
    if len(input_bytes) != request.content_length:
        msg = f"Read {len(input_bytes)} bytes, expecting: {request.content_length}"
        log.error(msg)
        raise HTTPInternalServerError()
    if rank == 1:
        coord_type_str = "uint64"
    else:
        coord_type_str = f"({rank},)uint64"
    if put_points:
        # create a numpy array with the following type:
        #   (coord1, coord2, ...) | dset_dtype
        point_dt = np.dtype([("coord", np.dtype(coord_type_str)),
                             ("value", dset_dtype)])
        point_shape = (num_points,)
        chunk_init = True
    else:
        point_dt = np.dtype('uint64')
        point_shape = (num_points, rank)
        chunk_init = False
    point_arr = bytesToArray(input_bytes, point_dt, point_shape)
    chunk_arr = await get_chunk(app, chunk_id, dset_json, bucket=bucket,
                                s3path=s3path, s3offset=s3offset,
                                s3size=s3size, chunk_init=chunk_init)
    if chunk_arr is None:
        log.warn(f"chunk {chunk_id} not found")
        raise HTTPNotFound()
    if put_points:
        # writing point data
        try:
            chunkWritePoints(chunk_id=chunk_id, chunk_layout=dims,
                             chunk_arr=chunk_arr, point_arr=point_arr)
        except ValueError as ve:
            log.warn(f"got value error from chunkWritePoints: {ve}")
            raise HTTPBadRequest()
        # write empty response
        resp = json_response({})
        save_chunk(app, chunk_id, dset_json, bucket=bucket)  # lazily write chunk to storage
    else:
        # read points
        try:
            output_arr = chunkReadPoints(chunk_id=chunk_id, chunk_layout=dims,
                                         chunk_arr=chunk_arr, point_arr=point_arr)
        except ValueError as ve:
            log.warn(f"got value error from chunkReadPoints: {ve}")
            raise HTTPBadRequest()
        output_data = arrayToBytes(output_arr)
        # write response
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(output_data)
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()
    return resp
def test_enable_chunked_encoding_with_content_length() -> None:
    resp = StreamResponse()
    resp.content_length = 234
    with pytest.raises(RuntimeError):
        resp.enable_chunked_encoding()
def test_content_length_setter(self):
    resp = StreamResponse()
    resp.content_length = 234
    self.assertEqual(234, resp.content_length)
async def GET_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    if not isValidUuid(chunk_id, "Chunk"):
        msg = f"Invalid chunk id: {chunk_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    validateInPartition(app, chunk_id)
    log.debug("request params: {}".format(list(params.keys())))

    s3path = None
    s3offset = 0
    s3size = 0
    bucket = None
    if "s3path" in params:
        s3path = params["s3path"]
        log.debug(f"GET_Chunk - using s3path: {s3path}")
    elif "bucket" in params:
        bucket = params["bucket"]
    else:
        bucket = None
    if "s3offset" in params:
        try:
            s3offset = int(params["s3offset"])
        except ValueError:
            log.error(f"invalid s3offset params: {params['s3offset']}")
            raise HTTPBadRequest()
    if "s3size" in params:
        try:
            s3size = int(params["s3size"])
        except ValueError:
            log.error(f"invalid s3size params: {params['s3size']}")
            raise HTTPBadRequest()
    if "dset" in params:
        msg = "Unexpected dset in GET request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    dset_id = getDatasetId(chunk_id)
    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    log.debug(f"dset_json: {dset_json}")
    type_json = dset_json["type"]
    dims = getChunkLayout(dset_json)
    log.debug("got dims: {}".format(dims))
    rank = len(dims)

    # get chunk selection from query params
    if "select" in params:
        log.debug("select: {}".format(params["select"]))
    selection = []
    for i in range(rank):
        dim_slice = getSliceQueryParam(request, i, dims[i])
        selection.append(dim_slice)
    selection = tuple(selection)
    log.debug(f"got selection: {selection}")

    dt = createDataType(type_json)
    log.debug(f"dtype: {dt}")

    rank = len(dims)
    if rank == 0:
        msg = "No dimension passed to GET chunk request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    if len(selection) != rank:
        msg = "Selection rank does not match shape rank"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    for i in range(rank):
        s = selection[i]
        log.debug("selection[{}]: {}".format(i, s))

    chunk_arr = await getChunk(app, chunk_id, dset_json, bucket=bucket,
                               s3path=s3path, s3offset=s3offset, s3size=s3size)
    if chunk_arr is None:
        # return a 404
        msg = "Chunk {} does not exist".format(chunk_id)
        log.info(msg)
        raise HTTPNotFound()

    resp = None
    if "query" in params:
        # do query selection
        query = params["query"]
        log.info(f"query: {query}")
        if rank != 1:
            msg = "Query selection only supported for one dimensional arrays"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        limit = 0
        if "Limit" in params:
            limit = int(params["Limit"])
        values = []
        indices = []
        field_names = []
        if dt.fields:
            field_names = list(dt.fields.keys())
        x = chunk_arr[selection]
        log.debug("x: {}".format(x))
        eval_str = getEvalStr(query, "x", field_names)
        log.debug("eval_str: {}".format(eval_str))
        where_result = np.where(eval(eval_str))
        log.debug("where_result: {}".format(where_result))
        where_result_index = where_result[0]
        log.debug("where_result index: {}".format(where_result_index))
        log.debug("boolean selection: {}".format(x[where_result_index]))
        s = selection[0]
        count = 0
        for index in where_result_index:
            log.debug("index: {}".format(index))
            value = x[index].tolist()
            log.debug("value: {}".format(value))
            json_val = bytesArrayToList(value)
            log.debug("json_value: {}".format(json_val))
            json_index = index.tolist() * s.step + s.start  # adjust for selection
            indices.append(json_index)
            values.append(json_val)
            count += 1
            if limit > 0 and count >= limit:
                log.info("got limit items")
                break
        query_result = {}
        query_result["index"] = indices
        query_result["value"] = values
        log.info(f"query_result returning: {len(indices)} rows")
        log.debug(f"query_result: {query_result}")
        resp = json_response(query_result)
    else:
        # get requested data
        output_arr = chunk_arr[selection]
        output_data = arrayToBytes(output_arr)
        # write response
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(output_data)
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()
    return resp
def test_content_length_setter() -> None:
    resp = StreamResponse()
    resp.content_length = 234
    assert 234 == resp.content_length
async def GET_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    if not isValidUuid(chunk_id, "Chunk"):
        msg = f"Invalid chunk id: {chunk_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
    validateInPartition(app, chunk_id)
    log.debug(f"request params: {params.keys()}")

    bucket = None
    s3path = None
    s3offset = None
    s3size = None
    query = None
    limit = 0
    if "s3path" in params:
        s3path = params["s3path"]
        log.debug(f"GET_Chunk - using s3path: {s3path}")
    elif "bucket" in params:
        bucket = params["bucket"]
    if "s3offset" in params:
        try:
            s3offset = int(params["s3offset"])
        except ValueError:
            log.error(f"invalid s3offset params: {params['s3offset']}")
            raise HTTPBadRequest()
    if "s3size" in params:
        try:
            s3size = int(params["s3size"])
        except ValueError:
            log.error(f"invalid s3size params: {params['s3size']}")
            raise HTTPBadRequest()
    if "query" in params:
        query = params["query"]
    if "Limit" in params:
        limit = int(params["Limit"])

    dset_id = getDatasetId(chunk_id)
    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    dims = getChunkLayout(dset_json)
    log.debug(f"got dims: {dims}")
    rank = len(dims)

    # get chunk selection from query params
    selection = []
    for i in range(rank):
        dim_slice = getSliceQueryParam(request, i, dims[i])
        selection.append(dim_slice)
    selection = tuple(selection)
    log.debug(f"got selection: {selection}")

    chunk_arr = await get_chunk(app, chunk_id, dset_json, bucket=bucket,
                                s3path=s3path, s3offset=s3offset,
                                s3size=s3size, chunk_init=False)
    if chunk_arr is None:
        msg = f"chunk {chunk_id} not found"
        log.warn(msg)
        raise HTTPNotFound()

    if query:
        # run given query
        try:
            read_resp = chunkQuery(chunk_id=chunk_id, chunk_layout=dims,
                                   chunk_arr=chunk_arr, slices=selection,
                                   query=query, limit=limit, return_json=True)
        except TypeError as te:
            log.warn(f"chunkQuery - TypeError: {te}")
            raise HTTPBadRequest()
        except ValueError as ve:
            log.warn(f"chunkQuery - ValueError: {ve}")
            raise HTTPBadRequest()
    else:
        # read selected data from chunk
        output_arr = chunkReadSelection(chunk_arr, slices=selection)
        read_resp = arrayToBytes(output_arr)

    # write response
    if isinstance(read_resp, bytes):
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(read_resp)
            await resp.prepare(request)
            await resp.write(read_resp)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()
    else:
        # JSON response
        resp = json_response(read_resp)
    return resp
def test_content_length_setter_with_enable_chunked_encoding():
    resp = StreamResponse()
    resp.enable_chunked_encoding()
    with pytest.raises(RuntimeError):
        resp.content_length = 234
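Both orderings of the chunked-encoding tests pin down a mutual exclusion: an explicit Content-Length and chunked transfer encoding cannot coexist on one response, and whichever is configured second raises RuntimeError. A minimal streaming-handler sketch under that constraint, assuming the aiohttp web API (the route and payload are illustrative):

from aiohttp import web

async def stream(request: web.Request) -> web.StreamResponse:
    resp = web.StreamResponse()
    resp.enable_chunked_encoding()   # opt into chunked transfer...
    # resp.content_length = 234      # ...so this line would now raise RuntimeError
    await resp.prepare(request)
    for part in (b'part-1\n', b'part-2\n'):
        await resp.write(part)
    await resp.write_eof()
    return resp

app = web.Application()
app.router.add_get('/stream', stream)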
async def image_proxy(request, url, referer=None):
    if not referer or is_referer_force_url(url):
        referer = get_referer_of_url(url)
    LOG.info(f'proxy image {url} referer={referer}')
    session = response = None

    async def do_cleanup():
        nonlocal session, response
        if response:
            response.close()
        if session:
            await session.close()

    try:
        await check_private_address(url)
        headers = {'User-Agent': DEFAULT_USER_AGENT}
        for h in PROXY_REQUEST_HEADERS:
            if h in request.headers:
                headers[h] = request.headers[h]
        referer_headers = dict(headers)
        referer_headers['Referer'] = referer
        request_timeout = 30
        session = aiohttp_client_session(
            auto_decompress=False,
            timeout=request_timeout,
        )
        # try the request with a Referer first; if refused, retry without it
        response = await get_response(session, url, referer_headers)
        if response.status in REFERER_DENY_STATUS:
            LOG.info(f'proxy image {url} referer={referer} '
                     f'failed {response.status}, will try without referer')
            response.close()
            response = await get_response(session, response.url, headers)
        is_chunked = response.headers.get('Transfer-Encoding', '').lower() == 'chunked'
        # using chunked encoding is forbidden for HTTP/1.0
        if is_chunked and request.version < HttpVersion11:
            version = 'HTTP/{0.major}.{0.minor}'.format(request.version)
            error_msg = f"using chunked encoding is forbidden for {version}"
            LOG.info(f'proxy image {url} referer={referer} failed: {error_msg}')
            response.close()
            raise ImageProxyError(error_msg)
    except ImageProxyError as ex:
        await do_cleanup()
        return ex.to_response()
    except Exception:
        await do_cleanup()
        raise

    try:
        my_response = StreamResponse(status=response.status)
        # forward 'Content-Length', 'Content-Type', 'Transfer-Encoding'
        if is_chunked:
            my_response.enable_chunked_encoding()
        elif response.headers.get('Transfer-Encoding'):
            my_response.headers['Transfer-Encoding'] = response.headers['Transfer-Encoding']
        if response.headers.get('Content-Length'):
            content_length = int(response.headers['Content-Length'])
            if content_length > MAX_IMAGE_SIZE:
                return json_response({'message': 'image too large'}, status=413)
            my_response.content_length = content_length
        if response.headers.get('Content-Type'):
            my_response.content_type = response.headers['Content-Type']
        for h in PROXY_RESPONSE_HEADERS:
            if h in response.headers:
                my_response.headers[h] = response.headers[h]
        await my_response.prepare(request)
    except Exception:
        await do_cleanup()
        raise

    try:
        content_length = 0
        async for chunk in response.content.iter_chunked(8 * 1024):
            content_length += len(chunk)
            if content_length > MAX_IMAGE_SIZE:
                LOG.warning(f'image too large, abort the response, url={url}')
                my_response.force_close()
                break
            await my_response.write(chunk)
        await my_response.write_eof()
    finally:
        await do_cleanup()
    return my_response