def __init__(self, message, info):
    self.info = info
    InvalidUsage.__init__(self, message=message)
def get(tag):
    # Nested helper: `json` and `request` come from the enclosing
    # handler's scope (see on_get_quote below for the full context).
    if tag not in json:
        raise InvalidUsage('JSON is missing: \'{}\''.format(tag))
    return request.json[tag].strip().upper()
def _get_mixer(request, id):
    if id not in request['session'].mixers or request['session'].mixers[id] is None:
        raise InvalidUsage('no such mixer ID')
    return request['session'].mixers[id]
async def _handler(request, file_uri=None):
    # Using this to determine if the URL is trying to break out of the path
    # served. os.path.realpath seems to be very slow
    if file_uri and '../' in file_uri:
        raise InvalidUsage("Invalid URL")
    # Merge served directory and requested file if provided
    # Strip all / that in the beginning of the URL to help prevent python
    # from herping a derp and treating the uri as an absolute path
    root_path = file_path = file_or_directory
    if file_uri:
        file_path = path.join(file_or_directory, sub('^[/]*', '', file_uri))

    # URL decode the path sent by the browser otherwise we won't be able to
    # match filenames which got encoded (filenames with spaces etc)
    file_path = path.abspath(unquote(file_path))
    if not file_path.startswith(path.abspath(unquote(root_path))):
        raise FileNotFound('File not found',
                           path=file_or_directory,
                           relative_url=file_uri)
    try:
        headers = {}
        # Check if the client has been sent this file before
        # and it has not been modified since
        stats = None
        if use_modified_since:
            stats = await stat(file_path)
            modified_since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                                      gmtime(stats.st_mtime))
            if request.headers.get('If-Modified-Since') == modified_since:
                return HTTPResponse(status=304)
            headers['Last-Modified'] = modified_since
        _range = None
        if use_content_range:
            _range = None
            if not stats:
                stats = await stat(file_path)
            headers['Accept-Ranges'] = 'bytes'
            headers['Content-Length'] = str(stats.st_size)
            if request.method != 'HEAD':
                try:
                    _range = ContentRangeHandler(request, stats)
                except HeaderNotFound:
                    pass
                else:
                    del headers['Content-Length']
                    for key, value in _range.headers.items():
                        headers[key] = value
        if request.method == 'HEAD':
            return HTTPResponse(
                headers=headers,
                content_type=guess_type(file_path)[0] or 'text/plain')
        else:
            if stream_large_files:
                if isinstance(stream_large_files, int):
                    threshold = stream_large_files
                else:
                    threshold = 1024 * 1024

                if not stats:
                    stats = await stat(file_path)
                if stats.st_size >= threshold:
                    return await file_stream(file_path, headers=headers,
                                             _range=_range)
            return await file(file_path, headers=headers, _range=_range)
    except ContentRangeError:
        raise
    except Exception:
        raise FileNotFound('File not found',
                           path=file_or_directory,
                           relative_url=file_uri)
async def _static_request_handler(
    self,
    file_or_directory,
    use_modified_since,
    use_content_range,
    stream_large_files,
    request,
    content_type=None,
    __file_uri__=None,
):
    # Using this to determine if the URL is trying to break out of the path
    # served. os.path.realpath seems to be very slow
    if __file_uri__ and "../" in __file_uri__:
        raise InvalidUsage("Invalid URL")
    # Merge served directory and requested file if provided
    # Strip all / that in the beginning of the URL to help prevent python
    # from herping a derp and treating the uri as an absolute path
    root_path = file_path = file_or_directory
    if __file_uri__:
        file_path = path.join(
            file_or_directory, sub("^[/]*", "", __file_uri__))

    # URL decode the path sent by the browser otherwise we won't be able to
    # match filenames which got encoded (filenames with spaces etc)
    file_path = path.abspath(unquote(file_path))
    if not file_path.startswith(path.abspath(unquote(root_path))):
        error_logger.exception(
            f"File not found: path={file_or_directory}, "
            f"relative_url={__file_uri__}")
        raise FileNotFound(
            "File not found",
            path=file_or_directory,
            relative_url=__file_uri__,
        )
    try:
        headers = {}
        # Check if the client has been sent this file before
        # and it has not been modified since
        stats = None
        if use_modified_since:
            stats = await stat_async(file_path)
            modified_since = strftime(
                "%a, %d %b %Y %H:%M:%S GMT", gmtime(stats.st_mtime))
            if request.headers.get("If-Modified-Since") == modified_since:
                return HTTPResponse(status=304)
            headers["Last-Modified"] = modified_since
        _range = None
        if use_content_range:
            _range = None
            if not stats:
                stats = await stat_async(file_path)
            headers["Accept-Ranges"] = "bytes"
            headers["Content-Length"] = str(stats.st_size)
            if request.method != "HEAD":
                try:
                    _range = ContentRangeHandler(request, stats)
                except HeaderNotFound:
                    pass
                else:
                    del headers["Content-Length"]
                    for key, value in _range.headers.items():
                        headers[key] = value

        if "content-type" not in headers:
            content_type = (content_type
                            or guess_type(file_path)[0]
                            or DEFAULT_HTTP_CONTENT_TYPE)

            if "charset=" not in content_type and (
                    content_type.startswith("text/")
                    or content_type == "application/javascript"):
                content_type += "; charset=utf-8"

            headers["Content-Type"] = content_type

        if request.method == "HEAD":
            return HTTPResponse(headers=headers)
        else:
            if stream_large_files:
                if type(stream_large_files) == int:
                    threshold = stream_large_files
                else:
                    threshold = 1024 * 1024

                if not stats:
                    stats = await stat_async(file_path)
                if stats.st_size >= threshold:
                    return await file_stream(
                        file_path, headers=headers, _range=_range)
            return await file(file_path, headers=headers, _range=_range)
    except ContentRangeError:
        raise
    except Exception:
        error_logger.exception(
            f"File not found: path={file_or_directory}, "
            f"relative_url={__file_uri__}")
        raise FileNotFound(
            "File not found",
            path=file_or_directory,
            relative_url=__file_uri__,
        )
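Both static handlers above rely on the same traversal guard: normalize the joined path with path.abspath and require that it still live under the served root. A minimal standalone sketch of that check (the paths are illustrative, not taken from the handlers):

from os import path

root = path.abspath("/srv/static")
ok = path.abspath("/srv/static/css/app.css")
bad = path.abspath("/srv/static/../../etc/passwd")  # normalizes to /etc/passwd
assert ok.startswith(root)
assert not bad.startswith(root)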
async def http1_request_header(self):
    """
    Receive and parse request header into self.request.
    """
    HEADER_MAX_SIZE = min(8192, self.request_max_size)
    # Receive until full header is in buffer
    buf = self.recv_buffer
    pos = 0
    while True:
        pos = buf.find(b"\r\n\r\n", pos)
        if pos != -1:
            break
        pos = max(0, len(buf) - 3)
        if pos >= HEADER_MAX_SIZE:
            break
        await self._receive_more()
    if pos >= HEADER_MAX_SIZE:
        raise PayloadTooLarge("Request header exceeds the size limit")
    # Parse header content
    try:
        head = buf[:pos]
        raw_headers = head.decode(errors="surrogateescape")
        reqline, *split_headers = raw_headers.split("\r\n")
        method, self.url, protocol = reqline.split(" ")
        if protocol == "HTTP/1.1":
            self.keep_alive = True
        elif protocol == "HTTP/1.0":
            self.keep_alive = False
        else:
            raise Exception  # Raise a Bad Request on try-except
        self.head_only = method.upper() == "HEAD"
        request_body = False
        headers = []
        for name, value in (h.split(":", 1) for h in split_headers):
            name, value = h = name.lower(), value.lstrip()
            if name in ("content-length", "transfer-encoding"):
                request_body = True
            elif name == "connection":
                self.keep_alive = value.lower() == "keep-alive"
            headers.append(h)
    except Exception:
        raise InvalidUsage("Bad Request")
    headers_instance = Header(headers)
    self.upgrade_websocket = (
        headers_instance.get("upgrade", "").lower() == "websocket"
    )
    # Prepare a Request object
    request = self.protocol.request_class(
        url_bytes=self.url.encode(),
        headers=headers_instance,
        head=bytes(head),
        version=protocol[5:],
        method=method,
        transport=self.protocol.transport,
        app=self.protocol.app,
    )
    # Prepare for request body
    self.request_bytes_left = self.request_bytes = 0
    if request_body:
        headers = request.headers
        expect = headers.get("expect")
        if expect is not None:
            if expect.lower() == "100-continue":
                self.expecting_continue = True
            else:
                raise HeaderExpectationFailed(f"Unknown Expect: {expect}")
        if headers.get("transfer-encoding") == "chunked":
            self.request_body = "chunked"
            pos -= 2  # One CRLF stays in buffer
        else:
            self.request_body = True
            self.request_bytes_left = self.request_bytes = int(
                headers["content-length"])
    # Remove header and its trailing CRLF
    del buf[:pos + 4]
    self.stage = Stage.HANDLER
    self.request, request.stream = request, self
    self.protocol.state["requests_count"] += 1
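A standalone sketch of the request-line and header split performed inside the try-block above, runnable outside Sanic (the raw bytes are illustrative):

raw = (b"GET /index.html HTTP/1.1\r\n"
       b"Host: example.com\r\n"
       b"Connection: keep-alive\r\n\r\n")
head = raw[:raw.find(b"\r\n\r\n")]
reqline, *split_headers = head.decode(errors="surrogateescape").split("\r\n")
method, url, protocol = reqline.split(" ")
headers = [(name.lower(), value.lstrip())
           for name, value in (h.split(":", 1) for h in split_headers)]
assert (method, protocol) == ("GET", "HTTP/1.1")
assert ("connection", "keep-alive") in headers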
async def enable_schedule_handler(
    request: Request,
    ip_address: str,
    phone_id: str,
    device_id: str,
    device_password: str,
) -> HTTPResponse:
    """Use for handling requests to /switcher/enable_schedule.

    Args:
        request: ``sanic``'s request object.
        ip_address: the local ip address.
        phone_id: the extracted phone id.
        device_id: the extracted device id.
        device_password: the extracted device password.

    Raises:
        sanic.exceptions.InvalidUsage: when encountering faulty data.
        sanic.exceptions.ServerError: when encountering any error.

    Returns:
        Json object representing the request status.
        More information is available in the ``Usage`` section.

    Note:
        Accepts arguments as json body or query parameters.

    """
    try:
        if request.args and consts.PARAM_SCHEDULE_DATA in request.args:
            schedule_data = request.args[consts.PARAM_SCHEDULE_DATA][0]
        elif request.json and consts.PARAM_SCHEDULE_DATA in request.json:
            schedule_data = str(request.json[consts.PARAM_SCHEDULE_DATA])
        else:
            raise InvalidUsage("Argument schedule_data is missing.", 400)

        if len(schedule_data) != 24:
            raise InvalidUsage(
                "Argument schedule_data length is not 24.", 400)

        updated_schedule_data = (
            schedule_data[0:2] + ENABLE_SCHEDULE + schedule_data[4:])

        async with SwitcherV2Api(
            get_running_loop(),
            ip_address,
            phone_id,
            device_id,
            device_password,
        ) as swapi:
            response = await swapi.disable_enable_schedule(
                updated_schedule_data)

        if (response and response.msg_type ==
                messages.ResponseMessageType.DISABLE_ENABLE_SCHEDULE):
            return json({consts.KEY_SUCCESSFUL: response.successful})
        return json({
            consts.KEY_SUCCESSFUL: False,
            consts.KEY_MESSAGE: "Failed enabling the schedule.",
        })
    except ExceptionSet as exc:
        raise ServerError("Failed enabling the schedule.", 500) from exc
def _get_input(request, id):
    if id not in request.ctx.session.inputs or request.ctx.session.inputs[id] is None:
        raise InvalidUsage('no such input ID')
    return request.ctx.session.inputs[id]
async def add_jogging_result(request, *args, **kwargs):
    user_from_token = retrieve_user(request, args, kwargs)
    if user_from_token is None:
        raise InvalidUsage("invalid parameter (maybe expired?)")
    if (
        request.json is None
        or "date" not in request.json
        or "distance" not in request.json
        or "time" not in request.json
        or "location" not in request.json
    ):
        raise InvalidUsage(
            "invalid payload (should be {date, distance, time, location})"
        )
    distance = int(request.json["distance"])
    if distance <= 0:
        raise InvalidUsage("distance needs to be positive")
    try:
        date = datetime.datetime.strptime(
            request.json["date"], "%Y-%m-%d"
        ).date()
    except ValueError:
        raise InvalidUsage("invalid date (should be 'YYYY-MM-DD')")
    lat_long = request.json["location"].split(" ")
    if len(lat_long) != 2:
        raise InvalidUsage("invalid location (should be 'LAT LONG')")
    try:
        lat = float(lat_long[0])
        long = float(lat_long[1])
    except ValueError:
        raise InvalidUsage(
            "invalid location (lat & long should be floating-point)"
        )
    if not (-90.0 <= lat <= 90.0 and -180 <= long <= 180):
        raise InvalidUsage(
            "invalid location (The latitude must be a number between -90"
            " and 90 and the longitude between -180 and 180)"
        )
    try:
        time = int(request.json["time"])
    except ValueError:
        raise InvalidUsage("invalid time (time should be an integer)")
    if time <= 0:
        raise InvalidUsage("invalid time (time should be positive)")
    condition = await get_weather_condition(lat, long, date)
    if condition is None:
        raise InvalidUsage(
            "can't fetch running conditions for that location & time"
        )
    user_id = retrieve_user(request, args, kwargs).user_id
    jog = JoggingResult(
        user_id,
        request.json["location"],
        date,
        distance,
        time,
        json.dumps(condition["data"][0]),
    )
    id = jog.save()
    return response.json({"result_id": id}, status=201)
async def proxy(self, request: ONSRequest) -> SearchResult:
    """
    Proxy an Elasticsearch query over HTTP
    :param request:
    :return:
    """
    # Initialise the search engine
    engine: AbstractSearchEngine = self.get_search_engine_instance()

    # Parse the request body for a valid Elasticsearch query
    body: dict = request.get_elasticsearch_query()

    # Parse query and filters
    query: dict = loads(body.get("query"))
    type_filters_raw = body.get("filter")

    # Update the search engine with the query JSON
    engine.update_from_dict(query)

    # Extract paginator params
    page = request.get_current_page()
    page_size = request.get_page_size()
    sort_by = request.get_sort_by()

    try:
        engine: AbstractSearchEngine = engine.paginate(page, page_size)
    except RequestSizeExceededException as e:
        # Log and raise a 400 BAD_REQUEST
        message = "Requested page size exceeds max allowed: '{0}'".format(e)
        logger.error(request.request_id, message, exc_info=e)
        raise InvalidUsage(message)

    # Add any type filters
    if type_filters_raw is not None:
        if not isinstance(type_filters_raw, list):
            type_filters_raw = [type_filters_raw]
        try:
            type_filters = AvailableTypeFilters.from_string_list(
                type_filters_raw)
            engine: AbstractSearchEngine = engine.type_filter(type_filters)
        except UnknownTypeFilter as e:
            message = "Received unknown type filter: '{0}'".format(
                e.unknown_type_filter)
            logger.error(request.request_id, message, exc_info=e)
            raise InvalidUsage(message)

    # Execute
    try:
        logger.debug(request.request_id, "Executing proxy query",
                     extra={"query": engine.to_dict()})
        response: ONSResponse = await engine.execute()
    except ConnectionError as e:
        message = ("Unable to connect to Elasticsearch cluster to perform "
                   "proxy query request")
        logger.error(request.request_id, message, e)
        raise ServerError(message)

    search_result: SearchResult = response.to_content_query_search_result(
        page, page_size, sort_by)

    return search_result
async def check_post_request_data(request):
    if request.method == "POST" and request.json is None:
        raise InvalidUsage("POST request data should not be None")
async def geo_ip(request, ip):
    """Given an IP, return information about the IP."""
    if not is_ip(ip):
        raise InvalidUsage(f'{ip} is not a valid ip')
    profile = get_profile(ip)
    return json(profile)
def blueprint_1_error(request: Request):
    raise InvalidUsage("Invalid")
async def update(self, **kwargs):
    try:
        await self.update_from_dict(kwargs)
        await self.save()
    except ValueError:
        raise InvalidUsage('Invalid data')
def get_websocket_connection(self) -> WebSocketConnection:
    try:
        return self._websocket_connection
    except AttributeError:
        raise InvalidUsage("Improper websocket connection.")
async def update_jogging_result(request, *args, **kwargs):
    user_from_token = retrieve_user(request, args, kwargs)
    if user_from_token is None:
        raise InvalidUsage("invalid parameter (maybe expired?)")
    try:
        jogging_id = int(request.path.split("/")[2])
    except ValueError as e:
        raise InvalidUsage(e)
    if jogging_id < 0:
        raise InvalidUsage("invalid id")
    jog = JoggingResult.load_by_jogging_id(jogging_id)
    if jog is None:
        raise InvalidUsage("invalid id")
    user_id_from_token = user_from_token.user_id
    if user_id_from_token != jog.user_id:
        raise Forbidden("user can only access user jogs")
    if "distance" in request.json:
        distance = request.json["distance"]
        if distance <= 0:
            raise InvalidUsage("distance needs to be positive")
        jog.distance = int(distance)
    if "date" in request.json:
        try:
            date = datetime.datetime.strptime(
                request.json["date"], "%Y-%m-%d"
            ).date()
        except ValueError:
            raise InvalidUsage("invalid date (should be 'YYYY-MM-DD')")
        jog.date = date
    if "location" in request.json:
        location = request.json["location"]
        lat_long = location.split(" ")
        if len(lat_long) != 2:
            raise InvalidUsage("invalid location (should be 'LAT LONG')")
        try:
            lat = float(lat_long[0])
            long = float(lat_long[1])
        except ValueError:
            raise InvalidUsage(
                "invalid location (lat & long should be floating-point)"
            )
        if not (-90.0 <= lat <= 90.0 and -180 <= long <= 180):
            raise InvalidUsage(
                "invalid location (The latitude must be a number between -90"
                " and 90 and the longitude between -180 and 180)"
            )
        jog.location = location
    # Re-derive the weather condition from the (possibly updated) jog
    location = jog.location
    lat_long = location.split(" ")
    lat = float(lat_long[0])
    long = float(lat_long[1])
    condition = await get_weather_condition(lat, long, jog.date)
    if condition is None:
        raise InvalidUsage(
            "can't fetch running conditions for that location & time"
        )
    jog.condition = condition
    if "time" in request.json:
        try:
            time = int(request.json["time"])
        except ValueError:
            raise InvalidUsage("invalid time (time should be an integer)")
        if time <= 0:
            raise InvalidUsage("invalid time (time should be positive)")
        jog.time = time
    jog.save()
    return response.HTTPResponse(status=200)
async def read(self) -> Optional[bytes]:  # no cov
    """
    Read some bytes of request body.
    """
    # Send a 100-continue if needed
    if self.expecting_continue:
        self.expecting_continue = False
        await self._send(HTTP_CONTINUE)

    # Receive request body chunk
    buf = self.recv_buffer
    if self.request_bytes_left == 0 and self.request_body == "chunked":
        # Process a chunk header: \r\n<size>[;<chunk extensions>]\r\n
        while True:
            pos = buf.find(b"\r\n", 3)
            if pos != -1:
                break
            if len(buf) > 64:
                self.keep_alive = False
                raise InvalidUsage("Bad chunked encoding")
            await self._receive_more()
        try:
            size = int(buf[2:pos].split(b";", 1)[0].decode(), 16)
        except Exception:
            self.keep_alive = False
            raise InvalidUsage("Bad chunked encoding")
        if size <= 0:
            self.request_body = None

            if size < 0:
                self.keep_alive = False
                raise InvalidUsage("Bad chunked encoding")

            # Consume CRLF, chunk size 0 and the two CRLF that follow
            pos += 4
            # Might need to wait for the final CRLF
            while len(buf) < pos:
                await self._receive_more()
            del buf[:pos]
            return None
        # Remove CRLF, chunk size and the CRLF that follows
        del buf[:pos + 2]
        self.request_bytes_left = size
        self.request_bytes += size

    # Request size limit
    if self.request_bytes > self.request_max_size:
        self.keep_alive = False
        raise PayloadTooLarge("Request body exceeds the size limit")

    # End of request body?
    if not self.request_bytes_left:
        self.request_body = None
        return None

    # At this point we are good to read/return up to request_bytes_left
    if not buf:
        await self._receive_more()
    data = bytes(buf[:self.request_bytes_left])
    size = len(data)
    del buf[:size]
    self.request_bytes_left -= size
    await self.dispatch(
        "http.lifecycle.read_body",
        inline=True,
        context={"body": data},
    )
    return data
async def get(self, request, id):
    if id != "":
        raise InvalidUsage("wrong router",
                           status_code=HTTPStatus.BAD_REQUEST)
    data = await vk_methods.VKMethods(cfg.vk_token).list_group()
    return json(data, HTTPStatus.OK)
async def read(self) -> Optional[bytes]:
    """
    Read some bytes of request body.
    """
    # Send a 100-continue if needed
    if self.expecting_continue:
        self.expecting_continue = False
        await self._send(HTTP_CONTINUE)

    # Receive request body chunk
    buf = self.recv_buffer
    if self.request_bytes_left == 0 and self.request_body == "chunked":
        # Process a chunk header: \r\n<size>[;<chunk extensions>]\r\n
        while True:
            pos = buf.find(b"\r\n", 3)
            if pos != -1:
                break
            if len(buf) > 64:
                self.keep_alive = False
                raise InvalidUsage("Bad chunked encoding")
            await self._receive_more()
        try:
            size = int(buf[2:pos].split(b";", 1)[0].decode(), 16)
        except Exception:
            self.keep_alive = False
            raise InvalidUsage("Bad chunked encoding")
        del buf[:pos + 2]
        if size <= 0:
            self.request_body = None
            if size < 0:
                self.keep_alive = False
                raise InvalidUsage("Bad chunked encoding")
            return None
        self.request_bytes_left = size
        self.request_bytes += size

    # Request size limit
    if self.request_bytes > self.request_max_size:
        self.keep_alive = False
        raise PayloadTooLarge("Request body exceeds the size limit")

    # End of request body?
    if not self.request_bytes_left:
        self.request_body = None
        return None

    # At this point we are good to read/return up to request_bytes_left
    if not buf:
        await self._receive_more()
    data = bytes(buf[:self.request_bytes_left])
    size = len(data)
    del buf[:size]
    self.request_bytes_left -= size
    return data
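In both read() variants the chunk header is framed as b"\r\n<hex size>[;extensions]\r\n", because the CRLF terminating the previous chunk is left in the buffer. A standalone sketch of that size parse (buffer contents are illustrative):

buf = bytearray(b"\r\n1a;name=value\r\n")
pos = buf.find(b"\r\n", 3)  # end of the size line, skipping the leading CRLF
size = int(buf[2:pos].split(b";", 1)[0].decode(), 16)  # hex size; extensions dropped
assert size == 26  # 0x1a
del buf[:pos + 2]  # consume the whole chunk header
assert not buf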
def _get_user_by_id(self, user_id: str):
    user = self.db.fetchone_query(
        f"SELECT * FROM users WHERE user_id='{user_id}'")
    if not user:
        raise InvalidUsage('Could not locate user.')
    return user
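Interpolating user_id into the SQL text above leaves the query open to SQL injection. A hedged sketch of the same lookup with a parameterized query, shown with the stdlib sqlite3 driver since the placeholder support of the fetchone_query wrapper is unknown:

import sqlite3

from sanic.exceptions import InvalidUsage

def get_user_by_id(conn: sqlite3.Connection, user_id: str):
    # The driver binds user_id safely instead of splicing it into the SQL.
    cur = conn.execute("SELECT * FROM users WHERE user_id = ?", (user_id,))
    user = cur.fetchone()
    if not user:
        raise InvalidUsage('Could not locate user.')
    return user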
async def set_auto_shutdown_handler(
    request: Request,
    ip_address: str,
    phone_id: str,
    device_id: str,
    device_password: str,
) -> HTTPResponse:
    """Use for handling requests to /switcher/set_auto_shutdown.

    Args:
        request: ``sanic``'s request object.
        ip_address: the local ip address.
        phone_id: the extracted phone id.
        device_id: the extracted device id.
        device_password: the extracted device password.

    Raises:
        sanic.exceptions.InvalidUsage: when the requested time is not
            59-180 minutes.
        sanic.exceptions.ServerError: when encountering any error.

    Returns:
        Json object representing the request status.
        More information is available in the ``Usage`` section.

    Note:
        Accepts arguments as json body or query parameters.

    """
    try:
        if (request.args and consts.PARAM_HOURS in request.args
                and consts.PARAM_MINUTES in request.args):
            hours = int(request.args[consts.PARAM_HOURS][0])
            minutes = int(request.args[consts.PARAM_MINUTES][0])
        elif (request.json and consts.PARAM_HOURS in request.json
                and consts.PARAM_MINUTES in request.json):
            hours = int(request.json[consts.PARAM_HOURS])
            minutes = int(request.json[consts.PARAM_MINUTES])
        else:
            raise InvalidUsage(
                "One of the arguments hours or minutes is missing.", 400)

        time_guard = ((hours * 60 if hours > 0 else 0)
                      + (minutes if minutes > 0 else 0))
        if time_guard < 59 or time_guard > 180:
            raise InvalidUsage(
                "Auto shutdown can be set between 1 and 3 hours.", 400)

        time_to_off_timedelta = timedelta(hours=hours, minutes=minutes)

        async with SwitcherV2Api(
            get_running_loop(),
            ip_address,
            phone_id,
            device_id,
            device_password,
        ) as swapi:
            response = await swapi.set_auto_shutdown(time_to_off_timedelta)

        if (response and response.msg_type ==
                messages.ResponseMessageType.AUTO_OFF):
            return json({consts.KEY_SUCCESSFUL: response.successful})
        return json({
            consts.KEY_SUCCESSFUL: False,
            consts.KEY_MESSAGE: "Failed setting auto shutdown on device.",
        })
    except ExceptionSet as exc:
        raise ServerError(
            "Failed setting auto shutdown on device.", 500) from exc
async def remove(cls, **kwargs):
    """For User, only disable it, without completely deleting it."""
    if "id" not in kwargs:
        raise InvalidUsage("Missing field 'id' in query parameter")
    await super(BaseUser, cls).modify(kwargs, {"disabled": True})
def handler_1(request):
    raise InvalidUsage("OK")
async def wrapped(self, request, **kwargs):
    token = request.headers.get("token")
    if not token:
        raise InvalidUsage(message='Token is not provided.')
    return await func(self, request, **kwargs)
async def on_get_quote(request):
    json = request.json

    # Handle body matching
    def get(tag):
        if tag not in json:
            raise InvalidUsage('JSON is missing: \'{}\''.format(tag))
        return request.json[tag].strip().upper()

    base_currency, quote_currency, action = get('base_currency'), get(
        'quote_currency'), get('action')
    amount = float(get('amount'))

    product_id, inverted = match_product_id(base_currency, quote_currency)

    # match the type of order to the type of dataset
    book = client.get_book(product_id)
    if book is None:
        raise InvalidUsage('No data available yet!')
    if action == 'BUY':
        data = book.get_asks()
    elif action == "SELL":
        data = book.get_bids()
        if inverted:
            raise InvalidUsage(
                'Base currency and quote currency reversed for sell quote!')
    else:
        raise InvalidUsage('Unknown action type!')

    properties = products[product_id]
    weights, values = [], []

    # Find the scalar which is used to convert floats to the nearest
    # integer for weighting. Precision is used in exporting the result
    # with the proper number of decimals.
    if inverted:
        # All cryptos seem to be fixed at 8 decimals
        precision = 8
        amount_scalar = float(properties['quote_increment'])
    else:
        # get base currency decimals
        precision = abs(properties['quote_increment'].as_tuple().exponent)
        amount_scalar = float(properties['base_min_size'] *
                              properties['quote_increment'])

    # Go through converting prices and sizes to weights and values
    for (price, size) in data:
        exchange_rate = (size * price)
        if inverted:
            weights.append(int(exchange_rate / amount_scalar))
            values.append(size)
        else:
            weights.append(int(size / amount_scalar))
            values.append(exchange_rate)

    # run knapsack algorithm
    total = knapsack(values[:50], weights[:50], amount / amount_scalar)

    # return pretty formatted json result
    return jsonify({
        'price': '{:.{prec}f}'.format(total / amount, prec=precision),
        'total': '{:.{prec}f}'.format(total, prec=precision),
        'currency': quote_currency
    })
async def wrapped(self, request, **kwargs):
    if request.json is None:
        raise InvalidUsage('Data is not provided.')
    return await func(self, request, **kwargs)
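The two wrapped functions above (token check and JSON-body check) are the inner halves of decorators, which is where their free variable func comes from. A sketch of the enclosing decorator shape they imply, with require_json as an illustrative name:

from functools import wraps

from sanic.exceptions import InvalidUsage

def require_json(func):
    """Reject requests without a JSON body before invoking the handler."""
    @wraps(func)
    async def wrapped(self, request, **kwargs):
        if request.json is None:
            raise InvalidUsage('Data is not provided.')
        return await func(self, request, **kwargs)
    return wrapped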
def _get_overlay(request, id):
    if id not in request['session'].overlays or request['session'].overlays[id] is None:
        raise InvalidUsage('no such overlay ID')
    return request['session'].overlays[id]
def __init__(self, message, error_id=None):
    InvalidUsage.__init__(self, message)
    SynseError.__init__(self, message, error_id)
def _get_output(request, id):
    if id not in request['session'].outputs or request['session'].outputs[id] is None:
        raise InvalidUsage('no such output ID')
    return request['session'].outputs[id]
def get_row_and_column(args):
    row = args.get('row')
    column = args.get('column')
    if row is None or column is None:
        raise InvalidUsage("'row' and 'column' required")
    return row, column
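Across all of these examples, InvalidUsage is Sanic's 400 Bad Request exception: raising it from a handler makes the framework return a 400 response. A minimal runnable sketch tying the last helper's pattern to a route (the app and route names are illustrative; assumes a recent Sanic release):

from sanic import Sanic
from sanic.exceptions import InvalidUsage
from sanic.response import json

app = Sanic("demo")

@app.route("/cell")
async def cell(request):
    row = request.args.get("row")
    column = request.args.get("column")
    if row is None or column is None:
        raise InvalidUsage("'row' and 'column' required")  # -> HTTP 400
    return json({"row": row, "column": column})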