def on_json_loading_failed(self, e):
    """Called if decoding of the JSON data failed.

    The return value of this method is used by :attr:`json` when an
    error occurred.
    """
    error = BadRequest()
    # Flag the exception so downstream error handlers can distinguish a
    # JSON parse failure from other kinds of bad requests.
    error.json_parse_error = True
    raise error
def test_bad_request(self, PredictService, flask_app):
    """A BadRequest raised inside ``do`` must render as a 400 JSON error."""
    service = PredictService(mapping=[])
    model = Mock()
    request = Mock()
    with patch.object(service, 'do') as do_mock:
        with flask_app.test_request_context():
            error = BadRequest()
            error.args = ('daniel',)
            do_mock.side_effect = error
            response = service(model, request)
            payload = json.loads(response.get_data(as_text=True))
            assert response.status_code == 400
            expected = {
                "metadata": {
                    "status": "ERROR",
                    "error_code": -1,
                    "error_message": "BadRequest: ('daniel',)"
                }
            }
            assert payload == expected
def on_json_loading_failed(self, e):
    """Raise a 400 for undecodable JSON; include details only in debug mode."""
    if not (current_app and current_app.debug):
        raise BadRequest()
    raise BadRequest(f"Failed to decode JSON object: {e}")
def create():
    """Render the review-creation form and handle its submission.

    Resolves the target entity from the query string, enforces user
    restrictions (blocked account, daily review limit, duplicate
    review), persists the new review on a valid POST and redirects to
    it; otherwise renders the write form.

    Raises:
        BadRequest: when no supported entity type is present in the
            query string.
    """
    entity_id, entity_type = None, None
    for entity_type in ENTITY_TYPES:
        entity_id = request.args.get(entity_type)
        if entity_id:
            # NOTE: the loop variable already holds the matching type;
            # the original's `entity_type = entity_type` no-op is removed.
            break
    if not (entity_id or entity_type):
        logging.warning("Unsupported entity type")
        raise BadRequest("Unsupported entity type")

    if not entity_id:
        flash.info(gettext("Please choose an entity to review."))
        return redirect(url_for('search.selector', next=url_for('.create')))

    if current_user.is_blocked:
        flash.error(gettext("You are not allowed to write new reviews because your "
                            "account has been blocked by a moderator."))
        return redirect(url_for('user.reviews', user_id=current_user.id))

    # Checking if the user already wrote a review for this entity
    reviews, count = db_review.list_reviews(user_id=current_user.id, entity_id=entity_id)
    # BUGFIX: `count is not 0` compared identity against an int literal and
    # only worked thanks to CPython's small-int interning; use `!=`.
    review = reviews[0] if count != 0 else None
    if review:
        flash.error(gettext("You have already published a review for this entity!"))
        return redirect(url_for('review.entity', id=review["id"]))

    form = ReviewCreateForm(default_license_id=current_user.license_choice,
                            default_language=get_locale())

    if form.validate_on_submit():
        if current_user.is_review_limit_exceeded:
            flash.error(gettext("You have exceeded your limit of reviews per day."))
            return redirect(url_for('user.reviews', user_id=current_user.id))

        is_draft = form.state.data == 'draft'
        if form.text.data == '':
            form.text.data = None
        review = db_review.create(user_id=current_user.id, entity_id=entity_id,
                                  entity_type=entity_type, text=form.text.data,
                                  rating=form.rating.data,
                                  license_id=form.license_choice.data,
                                  language=form.language.data, is_draft=is_draft)
        if form.remember_license.data:
            db_users.update(current_user.id, user_new_info={
                "license_choice": form.license_choice.data,
            })
        if is_draft:
            flash.success(gettext("Review has been saved!"))
        else:
            flash.success(gettext("Review has been published!"))
        return redirect(url_for('.entity', id=review['id']))

    entity = get_entity_by_id(entity_id, entity_type)
    if not entity:
        flash.error(gettext("You can only write a review for an entity that exists on MusicBrainz!"))
        return redirect(url_for('search.selector', next=url_for('.create')))

    if entity_type == 'release_group':
        # Release groups additionally get streaming links on the form page.
        spotify_mappings = mbspotify.mappings(entity_id)
        soundcloud_url = soundcloud.get_url(entity_id)
        if not form.errors:
            flash.info(gettext("Please provide some text or a rating for this review."))
        return render_template('review/modify/write.html', form=form,
                               entity_type=entity_type, entity=entity,
                               spotify_mappings=spotify_mappings,
                               soundcloud_url=soundcloud_url)
    if not form.errors:
        flash.info(gettext("Please provide some text or a rating for this review."))
    return render_template('review/modify/write.html', form=form,
                           entity_type=entity_type, entity=entity)
def get(self, id):
    """Fetch one book by primary key; respond 400 if it does not exist."""
    book = Books.query.filter(Books.id == id).first()
    if book is None:
        raise BadRequest('id not present')
    return book
def failing_application(request):
    """Test application that unconditionally fails with 400 Bad Request."""
    error = BadRequest()
    raise error
def fetch(self, environ, request, uri):
    """Return the comment tree for thread ``uri`` as JSON.

    Query parameters: ``after`` (timestamp cutoff), ``limit`` (top-level
    count, 0 = none), ``parent`` (fetch replies to that comment only),
    ``plain`` and ``nested_limit`` (reply count per top-level comment).
    Returns a JSON response, or a BadRequest *response object* (not
    raised) for non-integer numeric parameters.
    """
    args = {'uri': uri, 'after': request.args.get('after', 0)}
    # Missing 'limit' -> int(None) raises TypeError -> unlimited.
    try:
        args['limit'] = int(request.args.get('limit'))
    except TypeError:
        args['limit'] = None
    except ValueError:
        return BadRequest("limit should be integer")
    if request.args.get('parent') is not None:
        try:
            args['parent'] = int(request.args.get('parent'))
            root_id = args['parent']
        except ValueError:
            return BadRequest("parent should be integer")
    else:
        args['parent'] = None
        root_id = None
    # NOTE(review): `plain` is True when the parameter equals its own
    # default '0' — looks inverted, but matches upstream; confirm against
    # _process_fetched_list's contract before changing.
    plain = request.args.get('plain', '0') == '0'
    reply_counts = self.comments.reply_count(uri, after=args['after'])
    if args['limit'] == 0:
        root_list = []
    else:
        root_list = list(self.comments.fetch(**args))
    if root_id not in reply_counts:
        reply_counts[root_id] = 0
    try:
        nested_limit = int(request.args.get('nested_limit'))
    except TypeError:
        nested_limit = None
    except ValueError:
        return BadRequest("nested_limit should be integer")
    rv = {
        'id': root_id,
        'total_replies': reply_counts[root_id],
        'hidden_replies': reply_counts[root_id] - len(root_list),
        'replies': self._process_fetched_list(root_list, plain)
    }
    # We are only checking for one level deep comments
    if root_id is None:
        for comment in rv['replies']:
            if comment['id'] in reply_counts:
                comment['total_replies'] = reply_counts[comment['id']]
                if nested_limit is not None:
                    if nested_limit > 0:
                        args['parent'] = comment['id']
                        args['limit'] = nested_limit
                        replies = list(self.comments.fetch(**args))
                    else:
                        replies = []
                else:
                    args['parent'] = comment['id']
                    replies = list(self.comments.fetch(**args))
            else:
                comment['total_replies'] = 0
                replies = []
            comment['hidden_replies'] = comment['total_replies'] - \
                len(replies)
            comment['replies'] = self._process_fetched_list(replies, plain)
    return JSON(rv, 200)
def handle_assertion(self, request):
    """Handle SAML Authentication login assertion (POST).

    Args:
        request (Request): Flask request object for this HTTP
            transaction.

    Returns:
        User Id (string), User attributes (dict), Redirect Flask
            response object to return user to now that authentication
            is complete.

    Raises:
        BadRequest: when error with SAML response from Identity
            Provider.
        AuthException: when unable to locate uid attribute in response.
    """
    if not request.form.get('SAMLResponse'):
        raise BadRequest('SAMLResponse missing from POST')
    # retrieve cache
    outstanding_queries_cache = \
        AuthDictCache(session, '_saml_outstanding_queries')
    identity_cache = IdentityCache(session, '_saml_identity')
    LOGGER.debug('Outstanding queries cache %s' % (
        outstanding_queries_cache))
    LOGGER.debug('Identity cache %s' % identity_cache)
    # use pysaml2 to process the SAML authentication response
    client = Saml2Client(self._config, identity_cache=identity_cache,
                         logger=LOGGER)
    saml_response = client.response(
        dict(SAMLResponse=request.form['SAMLResponse']),
        outstanding_queries_cache)
    if saml_response is None:
        raise BadRequest('SAML response is invalid')
    # make sure outstanding query cache is cleared for this session_id
    session_id = saml_response.session_id()
    if session_id in outstanding_queries_cache.keys():
        del outstanding_queries_cache[session_id]
    outstanding_queries_cache.sync()
    # retrieve session_info
    saml_session_info = saml_response.session_info()
    LOGGER.debug('SAML Session Info ( %s )' % saml_session_info)
    # retrieve user data via API
    try:
        if self.attribute_map.get('uid', 'name_id') == 'name_id':
            user_id = saml_session_info.get('name_id')
        else:
            user_id = saml_session_info['ava'] \
                .get(self.attribute_map.get('uid'))[0]
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. A missing/empty attribute can only produce
    # KeyError ('ava' absent), TypeError (.get() returned None) or
    # IndexError (empty attribute list).
    except (KeyError, TypeError, IndexError):
        raise AuthException('Unable to find "%s" attribute in response' % (
            self.attribute_map.get('uid', 'name_id')))
    # Future: map attributes to user info
    user_attributes = dict()
    # set subject Id in cache to retrieved name_id
    session['_saml_subject_id'] = saml_session_info.get('name_id')
    LOGGER.debug('Outstanding queries cache %s' % (
        session['_saml_outstanding_queries']))
    LOGGER.debug('Identity cache %s' % session['_saml_identity'])
    LOGGER.debug('Subject Id %s' % session['_saml_subject_id'])
    relay_state = request.form.get('RelayState', '/')
    LOGGER.debug('Returning redirect to %s' % relay_state)
    return user_id, user_attributes, redirect(relay_state)
def _forbidden_attributes(self, data): for key in list(data.keys()): if key in self.reserved_keys: raise BadRequest("Reserved keys in payload")
def findById(uid):
    """Return the dog record whose ``id`` equals ``uid``.

    Raises:
        BadRequest: if no dog with that id exists.
    """
    try:
        return next(dog for dog in dogs if dog['id'] == uid)
    # BUGFIX: was a bare `except:`, which hid unrelated failures (e.g.
    # KeyError from a malformed record, KeyboardInterrupt). Only an
    # exhausted generator means "not found".
    except StopIteration:
        raise BadRequest(f"Dog with id {uid}, does not exist") from None
def handle_json(data):
    """Join the socket room named in ``data['user']``; 400 if the key is absent."""
    try:
        room = data['user']
    except KeyError:
        raise BadRequest("Missing key 'user'")
    join_room(room)
def new_fn(project, filename, *args, **kwargs):
    """Delegate to ``fn`` only when BOTH path components are safe.

    Raises:
        BadRequest: if either component contains characters outside
            ``[a-zA-Z0-9_-]``.
    """
    # BUGFIX: the original joined the two checks with `or`, so one valid
    # component let an unvalidated (e.g. path-traversal) value through in
    # the other. `\Z` replaces `$`, which would have accepted a trailing
    # newline.
    safe = r'[a-zA-Z0-9_-]+\Z'
    if re.match(safe, project) and re.match(safe, filename):
        return fn(project, filename, *args, **kwargs)
    raise BadRequest()
def __init__(self, param_name):
    """A 400 error describing a request that lacks a required parameter."""
    # Initialise the werkzeug exception first, then attach our message.
    BadRequest.__init__(self)
    self.description = "Missing parameter: {0}".format(param_name)
def test_handle_error_does_not_swallow_exceptions(self, api):
    """handle_error must surface a raised BadRequest as a 400 JSON body."""
    error = BadRequest('x')
    response = api.handle_error(error)
    assert response.status_code == 400
    assert response.get_data() == b'{"message": "x"}\n'
def new(self, environ, request, uri):
    """Create a new comment on thread ``uri`` from the JSON request body.

    Sanitizes user-supplied fields, creates the thread on first use
    (resolving its title from the origin page when necessary), runs
    guard validation, persists the comment and returns it as JSON —
    202 while awaiting moderation, 201 otherwise.
    """
    data = request.get_json()
    # Drop fields the API does not accept; default the optional ones.
    for field in set(data.keys()) - API.ACCEPT:
        data.pop(field)
    for key in ("author", "email", "website", "parent"):
        data.setdefault(key, None)
    valid, reason = API.verify(data)
    if not valid:
        return BadRequest(reason)
    # Escape potential HTML in the free-text fields.
    for field in ("author", "email", "website"):
        if data.get(field) is not None:
            data[field] = escape(data[field], quote=False)
    if data.get("website"):
        data["website"] = normalize(data["website"])
    data['mode'] = 2 if self.moderated else 1
    data['remote_addr'] = self._remote_addr(request)
    with self.isso.lock:
        if uri not in self.threads:
            if 'title' not in data:
                with http.curl('GET', local("origin"), uri) as resp:
                    if resp and resp.status == 200:
                        uri, title = parse.thread(resp.read(), id=uri)
                    else:
                        # BUGFIX: the %s placeholder was never interpolated,
                        # so clients saw a literal "%s" in the message.
                        return NotFound('URI does not exist %s' % uri)
            else:
                title = data['title']
            thread = self.threads.new(uri, title)
            self.signal("comments.new:new-thread", thread)
        else:
            thread = self.threads[uri]
    # notify extensions that the new comment is about to save
    self.signal("comments.new:before-save", thread, data)
    valid, reason = self.guard.validate(uri, data)
    if not valid:
        self.signal("comments.new:guard", reason)
        raise Forbidden(reason)
    with self.isso.lock:
        # if email-based auto-moderation enabled, check for previously approved author
        # right before approval.
        if self.approve_if_email_previously_approved and self.comments.is_previously_approved_author(
                data['email']):
            data['mode'] = 1
        rv = self.comments.add(uri, data)
    # notify extension, that the new comment has been successfully saved
    self.signal("comments.new:after-save", thread, rv)
    cookie = functools.partial(
        dump_cookie,
        value=self.isso.sign([rv["id"], sha1(rv["text"])]),
        max_age=self.conf.getint('max-age'))
    rv["text"] = self.isso.render(rv["text"])
    rv["hash"] = self.hash(rv['email'] or rv['remote_addr'])
    self.cache.set('hash',
                   (rv['email'] or rv['remote_addr']).encode('utf-8'),
                   rv['hash'])
    rv = self._add_gravatar_image(rv)
    for key in set(rv.keys()) - API.FIELDS:
        rv.pop(key)
    # success!
    self.signal("comments.new:finish", thread, rv)
    resp = JSON(rv, 202 if rv["mode"] == 2 else 201)
    resp.headers.add("Set-Cookie", cookie(str(rv["id"])))
    resp.headers.add("X-Set-Cookie", cookie("isso-%i" % rv["id"]))
    return resp
def post(self):
    """Play one move in an existing game (or cancel it).

    Reads the move from the JSON body, validates game state, player
    membership and turn order, applies stat/health changes, advances
    the turn counter and concludes the game on death or turn limit.

    Raises:
        BadRequest: missing/unknown game, invalid move, out-of-turn play.
        Unauthorized: concluded game or non-participant.
    """
    data = request.json or {}
    # NOTE(review): a missing "id" raises KeyError here, which the
    # ValueError handler below does not catch — confirm intended.
    game = self.db.get_doc("games", data["id"])
    if not game:
        raise BadRequest(description="Game not found.")
    if game["won"]:
        raise Unauthorized(
            description="You cannot play a game that has already been concluded.")
    if self.user_data["username"] not in (game["defender_username"],
                                          game["challenger_username"]):
        raise Unauthorized(description="You are not a player in this game.")
    try:
        # Work out which side the caller is on; cancelling forfeits to
        # the opponent.
        if self.user_data["username"] == game["defender_username"]:
            attacker = game["defender_stats"]
            victim = game["challenger_stats"]
            # Let's do the check for a game cancel in here as we already know the user's position
            if data["move"].lower() == "cancel":
                self.db.run(
                    self.db.query("games").get(data["id"]).update(
                        {"won": "challenger"}))
                return {
                    "success": True,
                    "game_data": self.db.get_doc("games", data["id"])
                }
        else:
            attacker = game["challenger_stats"]
            victim = game["defender_stats"]
            # Let's do the check for a game cancel in here as we already know the user's position
            if data["move"].lower() == "cancel":
                self.db.run(
                    self.db.query("games").get(data["id"]).update(
                        {"won": "defender"}))
                return {
                    "success": True,
                    "game_data": self.db.get_doc("games", data["id"])
                }
        win = False
        special = False
        update = None  # "attacker" or "victim"
        valid = False
        if self.user_data["username"] == game["turn"]:
            # Basic moves: contested stat roll with a ±5 random swing.
            if data["move"].lower() == "grapple":
                valid = True
                if attacker["strength"] + randint(-5, 5) > \
                        victim["dexterity"] + randint(-5, 5):
                    win = True
            elif data["move"].lower() == "punch":
                valid = True
                if attacker["dexterity"] + randint(-5, 5) > \
                        victim["strength"] + randint(-5, 5):
                    win = True
            elif data["move"].lower() == "kick":
                valid = True
                if attacker["dexterity"] + randint(-5, 5) > \
                        victim["dexterity"] + randint(-5, 5):
                    win = True
            elif data["move"].lower() in ("lightning", "wither", "gamble"):
                # Check if the character can use this special ability
                if attacker["special"] != data["move"].lower():
                    raise BadRequest(
                        "You cannot use this ability, "
                        "because your character's special ability is {0}".
                        format(attacker["special"]))
                valid = True
                special = True
            updated_stats = None
            if special:
                if data["move"].lower() == "gamble":
                    # 50/50: either side loses 1 strength and 1 dexterity.
                    if randint(1, 2) == 1:
                        # You lose points
                        update = "attacker"
                        updated_stats = attacker
                        updated_stats["strength"] -= 1
                        updated_stats["dexterity"] -= 1
                    else:
                        # They lose points
                        update = "victim"
                        updated_stats = victim
                        updated_stats["strength"] -= 1
                        updated_stats["dexterity"] -= 1
                elif data["move"].lower() == "lightning":
                    update = "victim"
                    updated_stats = victim
                    updated_stats["dexterity"] -= 1
                elif data["move"].lower() == "wither":
                    update = "victim"
                    updated_stats = victim
                    updated_stats["strength"] -= 1
            elif win:
                # A winning basic move always costs the victim health
                # plus a move-specific stat.
                update = "victim"
                updated_stats = victim
                if data["move"].lower() == "grapple":
                    updated_stats["dexterity"] -= 1
                    updated_stats["health"] -= 1
                elif data["move"].lower() == "punch":
                    updated_stats["strength"] -= 1
                    updated_stats["health"] -= 1
                elif data["move"].lower() == "kick":
                    updated_stats["strength"] -= 1
                    updated_stats["dexterity"] -= 1
                    updated_stats["health"] -= 1
            if valid:
                if self.user_data["username"] == game["defender_username"]:
                    # Update the turn to the next person
                    self.db.run(
                        self.db.query("games").get(data["id"]).update(
                            {"turn": game["challenger_username"]}))
                    if updated_stats:
                        if update == "attacker":
                            self.db.run(
                                self.db.query("games").get(data["id"]).update(
                                    {"defender_stats": updated_stats}))
                        else:
                            self.db.run(
                                self.db.query("games").get(data["id"]).update(
                                    {"challenger_stats": updated_stats}))
                    # Specials additionally cost the defender 1 health.
                    # NOTE(review): the "victim" branch bases the write on
                    # `attacker` and still targets defender_stats —
                    # presumably a self-cost for using the ability; confirm.
                    if special and update == "attacker":
                        health_update = updated_stats
                        health_update["health"] -= 1
                        self.db.run(
                            self.db.query("games").get(data["id"]).update(
                                {"defender_stats": health_update}))
                    elif special and update == "victim":
                        health_update = attacker
                        health_update["health"] -= 1
                        self.db.run(
                            self.db.query("games").get(data["id"]).update(
                                {"defender_stats": health_update}))
                elif self.user_data["username"] == game[
                        "challenger_username"]:
                    # Update the turn to the next person
                    self.db.run(
                        self.db.query("games").get(data["id"]).update(
                            {"turn": game["defender_username"]}))
                    if updated_stats:
                        if update == "attacker":
                            self.db.run(
                                self.db.query("games").get(data["id"]).update(
                                    {"challenger_stats": updated_stats}))
                        else:
                            self.db.run(
                                self.db.query("games").get(data["id"]).update(
                                    {"defender_stats": updated_stats}))
                    if special and update == "attacker":
                        health_update = updated_stats
                        health_update["health"] -= 1
                        self.db.run(
                            self.db.query("games").get(data["id"]).update(
                                {"challenger_stats": health_update}))
                    elif special and update == "victim":
                        health_update = attacker
                        health_update["health"] -= 1
                        self.db.run(
                            self.db.query("games").get(data["id"]).update(
                                {"challenger_stats": health_update}))
                # Increase the turn counter by one
                self.db.run(
                    self.db.query("games").get(data["id"]).update(
                        {"turn_number": game["turn_number"] + 1}))
                # Our old DB document may be out of date now so let's get a new copy
                health_check = self.db.get_doc("games", data["id"])
                if health_check["defender_stats"]["health"] <= 0:
                    self.db.run(
                        self.db.query("games").get(data["id"]).update(
                            {"won": "challenger"}))
                elif health_check["challenger_stats"]["health"] <= 0:
                    self.db.run(
                        self.db.query("games").get(data["id"]).update(
                            {"won": "defender"}))
                elif health_check["turn_number"] > health_check["max_turns"]:
                    self.db.run(
                        self.db.query("games").get(data["id"]).update(
                            {"won": "tie"}))
                return {
                    "success": True,
                    "data": self.db.get_doc("games", data['id'])
                }
        else:
            raise BadRequest(description="It is not your turn.")
        raise BadRequest(description="Please enter a valid move.")
    except ValueError:
        raise BadRequest(description="A move is required!")
def _check_ns(ns):
    """Reject requests whose namespace differs from the application's."""
    if ns == current_app.ns:
        return
    raise BadRequest("Bad namespace")
def token_check():
    """Report whether the ``token`` request header is valid; 400 when absent."""
    if "token" not in request.headers:
        raise BadRequest("Token was not given")
    return jsonify({"Result": is_token_valid(request.headers.get("token"))})
def generate_bad_request(error_code, message, **kwargs):
    """Build a BadRequest whose description is a structured error payload.

    Extra keyword arguments are merged in and take precedence over the
    ``code``/``description`` defaults.
    """
    payload = {'code': error_code, 'description': message, **kwargs}
    return BadRequest(payload)
def authenticate(self, next_url='/', binding=BINDING_HTTP_REDIRECT):
    """Start SAML Authentication login process.

    Args:
        next_url (string): HTTP URL to return user to when
            authentication is complete.
        binding (binding): Saml2 binding method to use for request,
            default BINDING_HTTP_REDIRECT (don't change til HTTP_POST
            support is complete in pysaml2.

    Returns:
        Flask Response object to return to user containing
            either HTTP_REDIRECT or HTTP_POST SAML message.

    Raises:
        AuthException: when unable to locate valid IdP.
        BadRequest: when invalid result returned from SAML client.
    """
    # find configured for IdP for requested binding method
    idp_entityid = ''
    idps = self._config.idps().keys()
    for idp in idps:
        if self._config.single_sign_on_services(idp, binding) != []:
            idp_entityid = idp
            break
    if idp_entityid == '':
        raise AuthException('Unable to locate valid IdP for this request')
    # fail if signing requested but no private key configured
    # NOTE(review): the flag is compared to the string 'true' — confirm
    # the config stores strings rather than booleans.
    if self._config.authn_requests_signed == 'true':
        if not self._config.key_file \
                or not os.path.exists(self._config.key_file):
            raise AuthException(
                'Signature requested for this Saml authentication request,'
                ' but no private key file configured')
    LOGGER.debug('Connecting to Identity Provider %s' % idp_entityid)
    # retrieve cache
    outstanding_queries_cache = \
        AuthDictCache(session, '_saml_outstanding_queries')
    LOGGER.debug('Outstanding queries cache %s' % (
        outstanding_queries_cache))
    # make pysaml2 call to authenticate
    client = Saml2Client(self._config, logger=LOGGER)
    (session_id, result) = client.authenticate(
        entityid=idp_entityid,
        relay_state=next_url,
        binding=binding)
    # The psaml2 source for this method indicates that BINDING_HTTP_POST
    # should not be used right now to authenticate. Regardless, we'll
    # check for it and act accordingly.
    if binding == BINDING_HTTP_REDIRECT:
        LOGGER.debug('Redirect to Identity Provider %s ( %s )' % (
            idp_entityid, result))
        response = make_response('', 302, dict([result]))
    elif binding == BINDING_HTTP_POST:
        LOGGER.warn('POST binding used to authenticate is not currently'
                    ' supported by pysaml2 release version. Fix in place in repo.')
        LOGGER.debug('Post to Identity Provider %s ( %s )' % (
            idp_entityid, result))
        response = make_response('\n'.join(result), 200)
    else:
        raise BadRequest('Invalid result returned from SAML client')
    LOGGER.debug(
        'Saving session_id ( %s ) in outstanding queries' % session_id)
    # cache the outstanding query
    outstanding_queries_cache.update({session_id: next_url})
    outstanding_queries_cache.sync()
    LOGGER.debug('Outstanding queries cache %s' % (
        session['_saml_outstanding_queries']))
    return response
def check_required(key, json, array_name, entity_id=''):
    """Ensure ``key`` is present in ``json``; raise a descriptive 400 otherwise."""
    if key in json:
        return
    raise BadRequest("'" + key + "' is a required property - '" +
                     array_name + "." + entity_id + "'")
def feed(self, environ, request, uri):
    """Serve an Atom feed of the comments on thread ``uri``.

    Returns 404 when no feed base URL is configured; honours a ``limit``
    query parameter (only when larger than the configured limit); sets
    ETag/Last-Modified from the newest comment for conditional GETs.
    """
    conf = self.isso.conf.section("rss")
    if not conf.get('base'):
        raise NotFound
    args = {
        'uri': uri,
        'order_by': 'id',
        'asc': 0,
        'limit': conf.getint('limit')
    }
    # Missing 'limit' -> int(None) raises TypeError -> keep the default.
    try:
        args['limit'] = max(int(request.args.get('limit')), args['limit'])
    except TypeError:
        pass
    except ValueError:
        return BadRequest("limit should be integer")
    comments = self.comments.fetch(**args)
    base = conf.get('base').rstrip('/')
    hostname = urlparse(base).netloc
    # Let's build an Atom feed.
    #     RFC 4287: https://tools.ietf.org/html/rfc4287
    #     RFC 4685: https://tools.ietf.org/html/rfc4685 (threading extensions)
    #     For IDs: http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id
    feed = ET.Element(
        'feed', {
            'xmlns': 'http://www.w3.org/2005/Atom',
            'xmlns:thr': 'http://purl.org/syndication/thread/1.0'
        })
    # For feed ID, we would use thread ID, but we may not have
    # one. Therefore, we use the URI. We don't have a year
    # either...
    id = ET.SubElement(feed, 'id')
    id.text = 'tag:{hostname},2018:/isso/thread{uri}'.format(
        hostname=hostname, uri=uri)
    # For title, we don't have much either. Be pretty generic.
    title = ET.SubElement(feed, 'title')
    title.text = 'Comments for {hostname}{uri}'.format(
        hostname=hostname, uri=uri)
    # comment0 remembers the first (i.e. newest, given asc=0) comment for
    # the feed-level <updated> element and the caching headers below.
    comment0 = None
    for comment in comments:
        if comment0 is None:
            comment0 = comment
        entry = ET.SubElement(feed, 'entry')
        # We don't use a real date in ID either to help with
        # threading.
        id = ET.SubElement(entry, 'id')
        id.text = 'tag:{hostname},2018:/isso/{tid}/{id}'.format(
            hostname=hostname, tid=comment['tid'], id=comment['id'])
        title = ET.SubElement(entry, 'title')
        title.text = 'Comment #{}'.format(comment['id'])
        updated = ET.SubElement(entry, 'updated')
        updated.text = '{}Z'.format(
            datetime.fromtimestamp(
                comment['modified'] or comment['created']).isoformat())
        author = ET.SubElement(entry, 'author')
        name = ET.SubElement(author, 'name')
        name.text = comment['author']
        ET.SubElement(
            entry, 'link', {
                'href': '{base}{uri}#isso-{id}'.format(
                    base=base, uri=uri, id=comment['id'])
            })
        content = ET.SubElement(entry, 'content', {
            'type': 'html',
        })
        content.text = self.isso.render(comment['text'])
        if comment['parent']:
            ET.SubElement(
                entry, 'thr:in-reply-to', {
                    'ref': 'tag:{hostname},2018:/isso/{tid}/{id}'.format(
                        hostname=hostname, tid=comment['tid'],
                        id=comment['parent']),
                    'href': '{base}{uri}#isso-{id}'.format(
                        base=base, uri=uri, id=comment['parent'])
                })
    # Updated is mandatory. If we have comments, we use the date
    # of last modification of the first one (which is the last
    # one). Otherwise, we use a fixed date.
    updated = ET.Element('updated')
    if comment0 is None:
        updated.text = '1970-01-01T01:00:00Z'
    else:
        updated.text = datetime.fromtimestamp(
            comment0['modified'] or comment0['created']).isoformat()
        updated.text += 'Z'
    feed.insert(0, updated)
    output = StringIO()
    ET.ElementTree(feed).write(output, encoding='utf-8',
                               xml_declaration=True)
    response = XML(output.getvalue(), 200)
    # Add an etag/last-modified value for caching purpose
    if comment0 is None:
        response.set_etag('empty')
        response.last_modified = 0
    else:
        response.set_etag('{tid}-{id}'.format(**comment0))
        response.last_modified = comment0['modified'] or comment0['created']
    return response.make_conditional(request)
def check_id(json, array_name):
    """Ensure the mandatory ID property is present in ``json``.

    Mirrors ``check_required``'s error-message format.

    Raises:
        BadRequest: when the ID key is absent.
    """
    if ID not in json:
        # BUGFIX: the message previously ended with an unbalanced quote
        # ("... - 'array"); close it to match check_required's format.
        raise BadRequest("'" + ID + "' is a required property - '" +
                         array_name + "'")
def update_container(self, account_id, name, mtime, dtime, object_count,
                     bytes_used, damaged_objects, missing_chunks,
                     bucket_name=None, autocreate_account=None,
                     autocreate_container=True, **kwargs):
    """Update a container's accounting entry via the Redis Lua script.

    Normalizes all inputs (timestamps to their canonical form, counters
    to 0, booleans and the bucket name to strings — the Lua script
    cannot receive None) and maps script errors back to HTTP exceptions.

    Returns:
        The container name on success.

    Raises:
        BadRequest: when account or container name is missing.
        NotFound: when the script reports a missing account/container.
        Conflict: when the event is older than the last update.
    """
    if not account_id or not name:
        raise BadRequest("Missing account or container")
    if autocreate_account is None:
        # Fall back to the backend-wide autocreate policy.
        autocreate_account = self.autocreate
    # Timestamps: None becomes the sentinel '0', otherwise normalize.
    if mtime is None:
        mtime = '0'
    else:
        mtime = Timestamp(mtime).normal
    if dtime is None:
        dtime = '0'
    else:
        dtime = Timestamp(dtime).normal
    # Counters: None means zero.
    if object_count is None:
        object_count = 0
    if bytes_used is None:
        bytes_used = 0
    if damaged_objects is None:
        damaged_objects = 0
    if missing_chunks is None:
        missing_chunks = 0
    # If no bucket name is provided, set it to ''
    # (we cannot pass None to the Lua script).
    bucket_name = bucket_name or ''
    bucket_lock = self.blockkey(bucket_name)
    now = Timestamp().normal
    ckey = AccountBackend.ckey(account_id, name)
    # KEYS for the Lua script: account, container, container listing,
    # bucket prefix, bucket listing.
    keys = [
        self.akey(account_id), ckey,
        self.clistkey(account_id), self._bucket_prefix,
        self.blistkey(account_id)
    ]
    # ARGV for the Lua script — order must match the script's contract.
    args = [
        account_id, name, bucket_name, bucket_lock, mtime, dtime,
        object_count, bytes_used, damaged_objects, missing_chunks,
        str(autocreate_account), now, EXPIRE_TIME,
        str(autocreate_container)
    ]
    try:
        self.script_update_container(keys=keys, args=args)
    except redis.exceptions.ResponseError as exc:
        # The script signals failures via well-known error suffixes.
        if text_type(exc).endswith("no_account"):
            raise NotFound("Account %s not found" % account_id)
        if text_type(exc).endswith("no_container"):
            raise NotFound("Container %s not found" % name)
        elif text_type(exc).endswith("no_update_needed"):
            raise Conflict("No update needed, "
                           "event older than last container update")
        else:
            raise
    return name
def is_exist_by_id(clazz, entity_id, session):
    """Raise BadRequest unless a ``clazz`` row with ``entity_id`` exists."""
    found = session.query(clazz).filter_by(id=uuid.UUID(entity_id)).first()
    if found is None:
        raise BadRequest(clazz.__name__ + ' is not registered, id: ' + entity_id)
def _check_extension(filename, allowed_extensions): if ('.' not in filename or filename.split('.').pop().lower() not in allowed_extensions): raise BadRequest( "{0} has an invalid name or extension".format(filename))
def bad_query_parameter(self, attrname):
    """Build (not raise) a 400 for an unrecognized query parameter."""
    message = 'Unknown or unsupported query parameter {0}'.format(attrname)
    return BadRequest(message)
def get_private_token_url():
    """Placeholder endpoint: private-token URLs are not supported yet."""
    raise BadRequest('not implemented')
def raise_node_not_found(id):
    """Abort with a 400 naming the node id that could not be resolved."""
    message = "can't find node by id '%s'" % str(id)
    raise BadRequest(message)
def get_raw_file_content(repo, tree, path):
    """Return the blob bytes at ``path`` inside ``tree``; 400 for non-blobs."""
    blob = get_object_from_path(repo, tree, path)
    if blob.type != GIT_OBJ_BLOB:
        raise BadRequest("path resolved to non-blob object")
    return blob.data
def _parse_fix(pilot_id):
    """Build a TrackingFix from the live-tracking request parameters.

    Only ``tm`` (POSIX timestamp) is mandatory; ``lat``/``lon``, ``alt``,
    ``sog`` and ``cog`` are validated and applied when present.

    Raises:
        BadRequest: for a missing ``tm`` or any malformed/out-of-range
            parameter.
    """
    fix = TrackingFix()
    fix.ip = request.remote_addr
    fix.pilot_id = pilot_id

    # Time
    if 'tm' not in request.values:
        raise BadRequest('`tm` (time) parameter is missing.')
    try:
        fix.time = datetime.utcfromtimestamp(int(request.values['tm']))
    except ValueError:
        raise BadRequest('`tm` (time) has to be a POSIX timestamp.')

    # Location — only applied when both coordinates are supplied.
    if 'lat' in request.values and 'lon' in request.values:
        try:
            fix.set_location(float(request.values['lon']),
                             float(request.values['lat']))
        except ValueError:
            raise BadRequest(
                '`lat` and `lon` have to be floating point value in degrees (WGS84).'
            )

    # Altitude
    if 'alt' in request.values:
        try:
            fix.altitude = int(request.values['alt'])
        except ValueError:
            raise BadRequest('`alt` has to be an integer value in meters.')
        if not -1000 <= fix.altitude <= 15000:
            raise BadRequest(
                '`alt` has to be a valid altitude in the range of -1000 to 15000 meters.'
            )

    # Speed — /3.6 converts the supplied km/h to m/s.
    if 'sog' in request.values:
        try:
            fix.ground_speed = int(request.values['sog']) / 3.6
        except ValueError:
            raise BadRequest(
                '`sog` (speed over ground) has to be an integer value in km/h.'
            )
        if not 0 <= fix.ground_speed <= (500 / 3.6):
            raise BadRequest(
                '`sog` (speed over ground) has to be a valid speed in the range of 0 to 500 km/h.'
            )

    # Track
    if 'cog' in request.values:
        try:
            fix.track = int(request.values['cog'])
        except ValueError:
            raise BadRequest(
                '`cog` (course over ground) has to be an integer value in degrees.'
            )
        if not 0 <= fix.track < 360:
            raise BadRequest(
                '`cog` (course over ground) has to be a valid angle between 0 and 360 degrees.'
            )

    # Ground elevation is looked up from the fix position.
    fix.elevation = Elevation.get(fix.location_wkt)
    return fix
def abort(message):
    """Build (not raise) a BadRequest carrying ``message`` as its description."""
    response = BadRequest()
    response.description = message
    return response
def parse_args(self):
    """Parse request arguments, converting parser failures into a uniform 400."""
    try:
        parsed = self.parser.parse_args()
    except BadRequest:
        raise BadRequest(DEFAULT_MISSING_REQUIRED)
    return parsed
def bad_request(message):
    """Build a BadRequest whose ``data`` attribute holds a JSON-able payload."""
    error = BadRequest(message)
    error.data = {'error': message}
    return error
# NOTE(review): the following defs take `self` and look like methods of a
# file-storage class whose header lies outside this chunk — confirm their
# intended enclosing scope before relying on this reconstruction.
def close(self):
    """Close the underlying stream, ignoring any error.

    NOTE(review): the bare ``except`` also swallows SystemExit and
    KeyboardInterrupt — consider narrowing to ``Exception``.
    """
    try:
        self.stream.close()
    except:
        pass

def __nonzero__(self):
    # Truthy when a filename was supplied (Python 2 protocol; Python 3
    # would use __bool__).
    return bool(self.filename)

def __getattr__(self, name):
    # Delegate unknown attributes (read, seek, ...) to the wrapped stream.
    return getattr(self.stream, name)

def __iter__(self):
    # Iterate the stream line by line until readline returns ''.
    return iter(self.readline, '')

def __repr__(self):
    return '<%s: %r (%r)>' % (self.__class__.__name__, self.filename,
                              self.content_type)

from werkzeug.http import dump_options_header, dump_header, generate_etag, \
    quote_header_value, parse_set_header, unquote_etag
from werkzeug.exceptions import BadRequest

# Make missing-key lookups on the request data structures surface as
# HTTP 400 responses instead of plain KeyErrors.
for _cls in (MultiDict, OrderedMultiDict, CombinedMultiDict, Headers,
             EnvironHeaders):
    _cls.KeyError = BadRequest.wrap(KeyError, _cls.__name__ + '.KeyError')
del _cls