import json
import time

import numpy as np
from aiohttp.web import (HTTPBadRequest, HTTPInternalServerError,
                         HTTPNotFound, StreamResponse, json_response)

# Note: helpers such as log, getChunkIndex, getChunkLayout, getChunkCoordinate,
# isValidUuid, validateInPartition, getDatasetId, get_metadata_obj,
# createDataType, request_read, getChunk/get_chunk, getChunkRelativePoint,
# bytesToArray, arrayToBytes, chunkWritePoints, chunkReadPoints, and save_chunk
# are assumed to be imported from the surrounding HSDS data-node modules.


async def POST_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    put_points = False
    num_points = 0
    if "count" in params:
        num_points = int(params["count"])

    if "action" in params and params["action"] == "put":
        log.info(f"POST Chunk put points, num_points: {num_points}")
        put_points = True
    else:
        log.info("POST Chunk get points")

    s3path = None
    s3offset = 0
    s3size = 0
    if "s3path" in params:
        if put_points:
            log.error("s3path can not be used with put points POST request")
            raise HTTPBadRequest()
        s3path = params["s3path"]
        log.debug(f"POST_Chunk - using s3path: {s3path}")
        bucket = None
    elif "bucket" in params:
        bucket = params["bucket"]
    else:
        bucket = None

    if "s3offset" in params:
        try:
            s3offset = int(params["s3offset"])
        except ValueError:
            log.error(f"invalid s3offset params: {params['s3offset']}")
            raise HTTPBadRequest()
    if "s3size" in params:
        try:
            s3size = int(params["s3size"])
        except ValueError:
            log.error(f"invalid s3size params: {params['s3size']}")
            raise HTTPBadRequest()

    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    log.info(f"POST chunk_id: {chunk_id}")
    chunk_index = getChunkIndex(chunk_id)
    log.debug(f"chunk_index: {chunk_index}")

    if not isValidUuid(chunk_id, "Chunk"):
        msg = f"Invalid chunk id: {chunk_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    validateInPartition(app, chunk_id)
    log.debug(f"request params: {list(params.keys())}")
    if "dset" in params:
        msg = "Unexpected dset in POST request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    dset_id = getDatasetId(chunk_id)

    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    log.debug(f"dset_json: {dset_json}")
    chunk_layout = getChunkLayout(dset_json)
    chunk_coord = getChunkCoordinate(chunk_id, chunk_layout)
    log.debug(f"chunk_coord: {chunk_coord}")

    if not request.has_body:
        msg = "POST Value with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    content_type = "application/octet-stream"
    if "Content-Type" in request.headers:
        # client should use "application/octet-stream" for binary transfer
        content_type = request.headers["Content-Type"]
    if content_type != "application/octet-stream":
        msg = f"Unexpected content_type: {content_type}"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    type_json = dset_json["type"]
    dset_dtype = createDataType(type_json)
    log.debug(f"dtype: {dset_dtype}")

    dims = getChunkLayout(dset_json)
    log.debug(f"got dims: {dims}")
    rank = len(dims)
    if rank == 0:
        msg = "POST chunk request with no dimensions"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    # create a numpy array for incoming points
    input_bytes = await request_read(request)
    if len(input_bytes) != request.content_length:
        msg = f"Read {len(input_bytes)} bytes, expecting: {request.content_length}"
        log.error(msg)
        raise HTTPInternalServerError()

    # get chunk from cache/s3.  If not found, init a new chunk if this is a
    # write request
    chunk_arr = await getChunk(app, chunk_id, dset_json, bucket=bucket,
                               s3path=s3path, s3offset=s3offset,
                               s3size=s3size, chunk_init=put_points)
    if chunk_arr is None:
        if put_points:
            log.error("no array returned for put_points")
            raise HTTPInternalServerError()
        else:
            # get points on a non-existent S3 object?
            log.warn("S3 object not found for get points")
            raise HTTPNotFound()

    log.debug(f"chunk_arr.shape: {chunk_arr.shape}")

    if put_points:
        # writing point data
        # create a numpy array with the following type:
        #   (coord1, coord2, ...) | dset_dtype
        if rank == 1:
            coord_type_str = "uint64"
        else:
            coord_type_str = f"({rank},)uint64"
        comp_dtype = np.dtype([("coord", np.dtype(coord_type_str)),
                               ("value", dset_dtype)])
        # np.fromstring is deprecated for binary input; frombuffer is the
        # supported equivalent
        point_arr = np.frombuffer(input_bytes, dtype=comp_dtype)
        if len(point_arr) != num_points:
            msg = f"Unexpected size of point array, got: {len(point_arr)} expected: {num_points}"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)

        for i in range(num_points):
            elem = point_arr[i]
            log.debug(f"non-relative coordinate: {elem}")
            if rank == 1:
                coord = int(elem[0])
                coord = coord % chunk_layout[0]  # adjust to chunk relative
            else:
                # copy the coordinate - frombuffer returns a read-only view
                coord = elem[0].copy()  # index to update
                for dim in range(rank):
                    # adjust to chunk relative
                    coord[dim] = int(coord[dim]) % chunk_layout[dim]
                coord = tuple(coord)  # need to convert to a tuple
            log.debug(f"relative coordinate: {coord}")

            val = elem[1]  # value
            try:
                chunk_arr[coord] = val  # update the point
            except IndexError:
                msg = "Out of bounds point index for POST Chunk"
                log.warn(msg)
                raise HTTPBadRequest(reason=msg)

        chunk_cache = app["chunk_cache"]
        chunk_cache.setDirty(chunk_id)  # async write to S3

        dirty_ids = app["dirty_ids"]
        now = int(time.time())
        dirty_ids[chunk_id] = (now, bucket)
        log.info(f"set {chunk_id} to dirty")
    else:
        # reading point data
        point_dt = np.dtype('uint64')  # use unsigned long for point index
        # read points as unsigned longs
        point_arr = np.frombuffer(input_bytes, dtype=point_dt)
        if len(point_arr) % rank != 0:
            msg = "Unexpected size of point array"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        num_points = len(point_arr) // rank
        log.debug(f"got {num_points} points")
        point_arr = point_arr.reshape((num_points, rank))
        output_arr = np.zeros((num_points,), dtype=dset_dtype)

        for i in range(num_points):
            point = point_arr[i, :]
            tr_point = getChunkRelativePoint(chunk_coord, point)
            val = chunk_arr[tuple(tr_point)]
            output_arr[i] = val

    if put_points:
        # write empty response
        resp = json_response({})
    else:
        # get data
        output_data = output_arr.tobytes()

        # write response
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(output_data)
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()

    return resp
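
# --- Illustrative example (not part of the handler) --------------------------
# A minimal client-side sketch of how the binary body for a put-points POST
# could be built to match the (coord, value) compound dtype the handler above
# expects.  The helper name and the default dtype are assumptions for
# illustration; a real client would use the dataset's actual dtype.
def make_put_points_body(points, values, dset_dtype=np.dtype("float32")):
    rank = len(points[0])
    if rank == 1:
        coord_type_str = "uint64"
    else:
        coord_type_str = f"({rank},)uint64"
    comp_dtype = np.dtype([("coord", np.dtype(coord_type_str)),
                           ("value", dset_dtype)])
    arr = np.zeros((len(points),), dtype=comp_dtype)
    for i, (pt, val) in enumerate(zip(points, values)):
        # absolute (not chunk-relative) coordinates; the handler takes the
        # modulo with the chunk layout
        arr[i] = (pt if rank > 1 else pt[0], val)
    return arr.tobytes()  # POST with Content-Type: application/octet-stream

# e.g. make_put_points_body([(2, 3), (5, 7)], [1.5, 2.5]) produces the body
# for a "?action=put&count=2" POST to the chunk endpoint
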
async def POST_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    put_points = False
    num_points = 0
    if "count" in params:
        num_points = int(params["count"])

    if "action" in params and params["action"] == "put":
        log.info("POST Chunk put points, num_points: {}".format(num_points))
        put_points = True
    else:
        log.info("POST Chunk get points")

    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    log.info("POST chunk_id: {}".format(chunk_id))
    chunk_index = getChunkIndex(chunk_id)
    log.debug("chunk_index: {}".format(chunk_index))

    if not isValidUuid(chunk_id, "Chunk"):
        msg = "Invalid chunk id: {}".format(chunk_id)
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    validateInPartition(app, chunk_id)
    log.debug("request params: {}".format(list(params.keys())))
    if "dset" not in params:
        msg = "Missing dset in POST request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    dset_json = json.loads(params["dset"])
    log.debug("dset_json: {}".format(dset_json))
    chunk_layout = getChunkLayout(dset_json)
    chunk_coord = getChunkCoordinate(chunk_id, chunk_layout)
    log.debug("chunk_coord: {}".format(chunk_coord))

    if not request.has_body:
        msg = "POST Value with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    content_type = "application/octet-stream"
    if "Content-Type" in request.headers:
        # client should use "application/octet-stream" for binary transfer
        content_type = request.headers["Content-Type"]
    if content_type != "application/octet-stream":
        msg = "Unexpected content_type: {}".format(content_type)
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    type_json = dset_json["type"]
    dset_dtype = createDataType(type_json)
    log.debug("dtype: {}".format(dset_dtype))

    dims = getChunkLayout(dset_json)
    log.debug("got dims: {}".format(dims))
    rank = len(dims)
    if rank == 0:
        msg = "POST chunk request with no dimensions"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    # create a numpy array for incoming points
    input_bytes = await request_read(request)
    if len(input_bytes) != request.content_length:
        msg = "Read {} bytes, expecting: {}".format(len(input_bytes),
                                                    request.content_length)
        log.error(msg)
        raise HTTPInternalServerError()

    # get chunk from cache/s3.  If not found, init a new chunk if this is a
    # write request
    chunk_arr = await getChunk(app, chunk_id, dset_json, chunk_init=put_points)

    if put_points:
        # writing point data
        # create a numpy array with the following type:
        #   (coord1, coord2, ...) | dset_dtype
        if rank == 1:
            coord_type_str = "uint64"
        else:
            coord_type_str = "({},)uint64".format(rank)
        comp_dtype = np.dtype([("coord", np.dtype(coord_type_str)),
                               ("value", dset_dtype)])
        # np.fromstring is deprecated for binary input; use frombuffer
        point_arr = np.frombuffer(input_bytes, dtype=comp_dtype)
        if len(point_arr) != num_points:
            msg = "Unexpected size of point array, got: {} expected: {}".format(
                len(point_arr), num_points)
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)

        for i in range(num_points):
            elem = point_arr[i]
            if rank == 1:
                coord = int(elem[0])
            else:
                coord = tuple(elem[0])  # index to update
            val = elem[1]  # value
            chunk_arr[coord] = val  # update the point

        chunk_cache = app["chunk_cache"]
        chunk_cache.setDirty(chunk_id)  # async write to S3

        dirty_ids = app["dirty_ids"]
        now = int(time.time())
        dirty_ids[chunk_id] = now
        log.info("set {} to dirty".format(chunk_id))
    else:
        # reading point data
        point_dt = np.dtype('uint64')  # use unsigned long for point index
        # read points as unsigned longs
        point_arr = np.frombuffer(input_bytes, dtype=point_dt)
        if len(point_arr) % rank != 0:
            msg = "Unexpected size of point array"
            log.warn(msg)
            raise HTTPBadRequest(reason=msg)
        num_points = len(point_arr) // rank
        log.debug("got {} points".format(num_points))
        point_arr = point_arr.reshape((num_points, rank))
        output_arr = np.zeros((num_points,), dtype=dset_dtype)

        for i in range(num_points):
            point = point_arr[i, :]
            tr_point = getChunkRelativePoint(chunk_coord, point)
            val = chunk_arr[tuple(tr_point)]
            output_arr[i] = val

    if put_points:
        # write empty response
        resp = json_response({})
    else:
        # get data
        output_data = output_arr.tobytes()

        # write response
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(output_data)
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()

    return resp
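
# --- Illustrative example (not part of the handler) --------------------------
# A matching sketch for the get-points side of the revision above: the request
# body is just the point coordinates flattened to uint64, and the binary
# response decodes with the dataset dtype (assumed float32 here for
# illustration).  Both helper names are hypothetical.
def make_get_points_body(points):
    # row-major flatten: (num_points, rank) -> num_points * rank uint64 values
    return np.asarray(points, dtype="uint64").tobytes()

def parse_get_points_response(body, dset_dtype=np.dtype("float32")):
    # one value per requested point, in request order
    return np.frombuffer(body, dtype=dset_dtype)
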
async def POST_Chunk(request):
    log.request(request)
    app = request.app
    params = request.rel_url.query
    put_points = False

    if "count" not in params:
        log.warn("expected count param")
        raise HTTPBadRequest()
    num_points = int(params["count"])

    if "action" in params and params["action"] == "put":
        log.info(f"POST Chunk put points - num_points: {num_points}")
        put_points = True
    else:
        log.info(f"POST Chunk get points - num_points: {num_points}")

    s3path = None
    s3offset = 0
    s3size = 0
    if "s3path" in params:
        if put_points:
            log.error("s3path can not be used with put points POST request")
            raise HTTPBadRequest()
        s3path = params["s3path"]
        log.debug(f"POST_Chunk - using s3path: {s3path}")
        bucket = None
    elif "bucket" in params:
        bucket = params["bucket"]
    else:
        bucket = None

    if "s3offset" in params:
        try:
            s3offset = int(params["s3offset"])
        except ValueError:
            log.error(f"invalid s3offset params: {params['s3offset']}")
            raise HTTPBadRequest()
    if "s3size" in params:
        try:
            s3size = int(params["s3size"])
        except ValueError:
            log.error(f"invalid s3size params: {params['s3size']}")
            raise HTTPBadRequest()

    chunk_id = request.match_info.get('id')
    if not chunk_id:
        msg = "Missing chunk id"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)
    log.info(f"POST chunk_id: {chunk_id}")
    chunk_index = getChunkIndex(chunk_id)
    log.debug(f"chunk_index: {chunk_index}")

    if not isValidUuid(chunk_id, "Chunk"):
        msg = f"Invalid chunk id: {chunk_id}"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    validateInPartition(app, chunk_id)
    log.debug(f"request params: {list(params.keys())}")
    if "dset" in params:
        msg = "Unexpected dset in POST request"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    if not request.has_body:
        msg = "POST Value with no body"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)

    content_type = "application/octet-stream"
    if "Content-Type" in request.headers:
        # client should use "application/octet-stream" for binary transfer
        content_type = request.headers["Content-Type"]
    if content_type != "application/octet-stream":
        msg = f"Unexpected content_type: {content_type}"
        log.error(msg)
        raise HTTPBadRequest(reason=msg)

    dset_id = getDatasetId(chunk_id)

    dset_json = await get_metadata_obj(app, dset_id, bucket=bucket)
    dims = getChunkLayout(dset_json)
    rank = len(dims)

    type_json = dset_json["type"]
    dset_dtype = createDataType(type_json)

    # create a numpy array for incoming points
    input_bytes = await request_read(request)
    if len(input_bytes) != request.content_length:
        msg = f"Read {len(input_bytes)} bytes, expecting: {request.content_length}"
        log.error(msg)
        raise HTTPInternalServerError()

    if rank == 1:
        coord_type_str = "uint64"
    else:
        coord_type_str = f"({rank},)uint64"

    if put_points:
        # create a numpy array with the following type:
        #   (coord1, coord2, ...) | dset_dtype
        point_dt = np.dtype([("coord", np.dtype(coord_type_str)),
                             ("value", dset_dtype)])
        point_shape = (num_points,)
        chunk_init = True
    else:
        point_dt = np.dtype('uint64')
        point_shape = (num_points, rank)
        chunk_init = False

    point_arr = bytesToArray(input_bytes, point_dt, point_shape)

    chunk_arr = await get_chunk(app, chunk_id, dset_json, bucket=bucket,
                                s3path=s3path, s3offset=s3offset,
                                s3size=s3size, chunk_init=chunk_init)
    if chunk_arr is None:
        log.warn(f"chunk {chunk_id} not found")
        raise HTTPNotFound()

    if put_points:
        # writing point data
        try:
            chunkWritePoints(chunk_id=chunk_id, chunk_layout=dims,
                             chunk_arr=chunk_arr, point_arr=point_arr)
        except ValueError as ve:
            log.warn(f"got value error from chunkWritePoints: {ve}")
            raise HTTPBadRequest()
        # write empty response
        resp = json_response({})

        save_chunk(app, chunk_id, bucket=bucket)  # lazily write chunk to storage
    else:
        # read points
        try:
            output_arr = chunkReadPoints(chunk_id=chunk_id, chunk_layout=dims,
                                         chunk_arr=chunk_arr,
                                         point_arr=point_arr)
        except ValueError as ve:
            log.warn(f"got value error from chunkReadPoints: {ve}")
            raise HTTPBadRequest()
        output_data = arrayToBytes(output_arr)

        # write response
        try:
            resp = StreamResponse()
            resp.headers['Content-Type'] = "application/octet-stream"
            resp.content_length = len(output_data)
            await resp.prepare(request)
            await resp.write(output_data)
        except Exception as e:
            log.error(f"Exception during binary data write: {e}")
            raise HTTPInternalServerError()
        finally:
            await resp.write_eof()

    return resp
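
# --- Illustrative example (not part of the handler) --------------------------
# The revision above factors the per-point loops into chunkWritePoints and
# chunkReadPoints.  As a rough sketch of what chunkWritePoints might do, based
# on the inline logic of the first revision (the name, signature, and error
# handling here are assumptions, not the actual HSDS implementation):
def chunk_write_points_sketch(chunk_id, chunk_layout, chunk_arr, point_arr):
    rank = len(chunk_layout)
    for elem in point_arr:
        if rank == 1:
            # adjust the absolute index to be chunk-relative
            coord = int(elem["coord"]) % chunk_layout[0]
        else:
            coord = tuple(int(c) % chunk_layout[d]
                          for d, c in enumerate(elem["coord"]))
        try:
            chunk_arr[coord] = elem["value"]  # update the point in place
        except IndexError:
            # surface as ValueError so the caller maps it to HTTPBadRequest
            raise ValueError(f"out of bounds point for chunk: {chunk_id}")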