def get(self, p):
    """Serve path *p*: a JSON listing for directories, raw contents for files.

    Aborts with 404 when the path does not exist.
    """
    if not files.exists(p):
        abort(404, message='File not found: %s' % p)
    if files.is_directory(p):
        entries = files.directory_listing(p)
        return [format_file_info(entry) for entry in entries]
    return send_file(files.absolute(p))
def get(self, uri):
    """Get details of the user identified by *uri*, dispatched on the
    'action' query argument.

    Actions:
      read           -- list the user's SUPPLIED interests.
      interest_score -- score the DELIM-separated keywords in 'keyword_args'.
      classify       -- placeholder; currently falls through to the 400 abort.
      generate       -- list the user's GENERATED interests.
    Any unrecognised action aborts with 400/INVALID_ARG.
    """
    if not DataM.is_user_for_key(self.key, uri):
        abort(404, status = FAILURE, message = RESOURCE_NOT_FOUND)
    args = self.parser.parse_args()
    if args['action'] == 'read':
        # NOTE(review): (SUPPLIED) is not a tuple -- it is just SUPPLIED.
        # If get_interests_for_user_for_key expects an iterable here, this
        # should be (SUPPLIED,) -- confirm against DataM. Same applies to
        # (GENERATED) in the 'generate' branch below.
        interest_list, length = DataM.get_interests_for_user_for_key(self.key, uri, interest_types = (SUPPLIED))
        return jsonify(status = SUCCESS, uid = uri, num_interests = length, interests = list(interest_list))
    elif args['action'] == 'interest_score':
        resp = {}
        kws_toscore = args['keyword_args']
        s = LearnM.score_all(self.key, uri, kws_toscore.split(DELIM))
        return jsonify(status = SUCCESS, uid = uri, score = s)
    elif args['action'] == 'classify':
        # Placeholder branch: classification is not implemented yet, so
        # control falls through to the abort(400) below.
        kw_toclassify = args['keyword_args']
        if kw_toclassify:
            pass
        url_toclassify = args['url_args']
        if url_toclassify:
            pass
    elif args['action'] == 'generate':
        interest_list, length = DataM.get_interests_for_user_for_key(self.key, uri, interest_types = (GENERATED))
        return jsonify(status = SUCCESS, uid = uri, num_interests = length, interests = list(interest_list))
    abort(400, status = FAILURE, message = INVALID_ARG)  # control should not reach here except in case of error
def get(self, ip_address):
    """Lookup a PXE entry for the given ip_address; 404 when absent."""
    client_config = pxe.ClientConfig(ip_address)
    if not client_config.exists():
        abort(404)
    return vars(client_config)
def delete(self, p):
    """Delete the file at path *p*.

    Aborts with 404 when the path does not exist and 501 when the
    underlying filesystem operation fails.
    """
    if not files.exists(p):
        abort(404, message='File not found: %s' % p)
    try:
        files.delete(p)
    except OSError as e:
        # Fix: pass the message as a keyword argument, like every other
        # abort() call in this file (flask_restful.abort takes only the
        # status code positionally; a positional message is dropped or
        # raises a TypeError).
        abort(501, message='File system error: ' + e.strerror)
def __init__(self):
    """Authenticate the request, then build the shared argument parser."""
    self.key = SecurityM.check_header(request.headers)
    if self.key == -1:
        abort(401, status=FAILURE, message=AUTH_FAIL)
    parser = reqparse.RequestParser()
    for arg_name in ('text', 'url'):
        parser.add_argument(arg_name, type=str, default="")
    self.parser = parser
def abort(code=http_exceptions.InternalServerError.code, message=None, **kwargs):
    '''Properly abort the current request.

    Ensures a 'status' key is present in the response payload (defaulting
    to *code*) and stringifies *message* before delegating to
    restful.abort.
    '''
    # Fix: the original condition read
    #     message or kwargs and 'status' not in kwargs
    # which parses as `message or (kwargs and 'status' not in kwargs)`,
    # so whenever a message was given the caller-supplied 'status' was
    # silently overwritten with the HTTP code. Parenthesize to the
    # intended meaning.
    if (message or kwargs) and 'status' not in kwargs:
        kwargs['status'] = code
    if message:
        kwargs['message'] = str(message)
    restful.abort(code, **kwargs)
def post(self):
    """Accept a new PXE entry for the given ip_address.

    Password, uuid, script and IPv6 parameters are optional; the IPv6
    parameters are used only when all of them are provided. A missing
    password or uuid is auto-generated by ClientConfig.
    """
    args = parser.parse_args()
    client_config = pxe.ClientConfig(
        args.ip_address,
        args.password,
        args.script,
        args.uuid,
        args.ipv6_address,
        args.ipv6_gateway,
        args.ipv6_prefix,
    )
    try:
        client_config.create(pxe.Label.find(args.label))
        location = url_for('pxeobject', _method='GET',
                           ip_address=client_config.ip_address)
        return vars(client_config), 201, {'Location': location}
    except pxe.exceptions.InputError as exception:
        abort(400, message=str(exception))
    except Exception as exception:
        abort(500, message=str(exception))
def give_item_hero(request_user, target_metadata_id):
    """Grant the hero described by *target_metadata_id* to *request_user*.

    If the user's matching hero is already visible, soul stones are added
    according to the hero's grade; otherwise the hero is made visible and
    basic equipment is granted. Returns a dict with the outcome.
    Aborts with 400 when the metadata or the user's hero cannot be found.
    """
    target_metadata = MetadataHeroModel.query.\
        filter(MetadataHeroModel.id == target_metadata_id).\
        first()
    # Fix: guard against an unknown metadata id; previously this crashed
    # with AttributeError on target_metadata.name below.
    if target_metadata is None:
        abort(400)
    # All metadata rows sharing this name (any grade) identify the hero.
    subquery_find_metadata_id = db.session.query(MetadataHeroModel.id).\
        filter(MetadataHeroModel.name == target_metadata.name).\
        subquery()
    hero = HeroModel.query. \
        filter(HeroModel.user_id == request_user.id).\
        filter(HeroModel.hero_metadata_id.in_(subquery_find_metadata_id)).\
        first()
    if hero is None:
        abort(400)
    result = dict()
    if hero.visible:
        # Duplicate acquisition converts into soul stones by grade.
        hero.soul_stone += \
            get_constant_value(CONSTANTS_KEY_HERO_SOUL_STONE_BY_GRADE,
                               target_metadata.grade)
        result['soul_stone'] = hero.soul_stone
    else:
        hero.visible = True
        equipment = give_hero_basic_equipment(request_user, hero)
        result['equipment'] = equipment
    result['hero_id'] = hero.id
    return result
def delete(self, year):
    """Delete the Temperature record for *year*; 404 when it is absent."""
    record = session.query(Temperature).filter(Temperature.year == year).first()
    if record is None:
        abort(404, message="Temperature {} doesn't exist".format(year))
    session.delete(record)
    session.commit()
    return {}, 204
def handle_related(self, query):
    """Resolve a related-object lookup described by *query*.

    Requires 'on' (field name) and 'pk' keys; aborts 400 when either is
    missing and 403 when the resulting query is invalid.
    """
    if 'on' not in query or 'pk' not in query:
        abort(400)
    filters = {query['on']: query['pk']}
    try:
        # NOTE: `cls` is resolved from the enclosing scope.
        return self.to_dict(cls.objects(**filters))
    except InvalidQueryError:
        abort(403)
def put(self, permission_id):
    """Add or remove groups on a permission.

    Expects a JSON payload mapping group ids to 'add'/'delete'. Aborts
    404 for an unknown permission and 403 for an unknown group, unknown
    action, deleting a non-member group, or an integrity failure.
    """
    permission = \
        models.Permission.query.filter_by(id=permission_id).first()
    if permission is None:
        abort(404)
    for group_id, action in request.get_json().iteritems():
        group = models.Group.query.filter_by(id=group_id).first()
        if group is None or action not in ('add', 'delete'):
            abort(403)
        if action == 'add':
            permission.groups.append(group)
        else:
            if group not in permission.groups:
                abort(403)
            permission.groups.remove(group)
    db.session.add(permission)
    try:
        db.session.commit()
    except IntegrityError:
        db.session.rollback()
        abort(403)
    return permission.to_json(), 200
def post(self):
    """Create a ShareData record from the JSON payload.

    Aborts 400 with no JSON body, 404 when the referenced user does not
    exist. Optional sensor fields default to None when absent.
    """
    if not request.json:
        abort(400)
    payload = request.json
    user_t = User.query.filter_by(id=payload["id"]).first_or_404()
    new_data = ShareData(
        time=payload["time"],
        longitude=payload["longitude"],
        latitude=payload["latitude"],
        user=user_t,
        temperature=payload.get("temperature"),
        humidity=payload.get("humidity"),
        uv=payload.get("uv"),
        pressure=payload.get("pressure"),
    )
    db.session.add(new_data)
    db.session.commit()
    return {"devicedata": payload}, 201
def put(self):
    """Update mutable fields of an existing user from the JSON payload.

    Requires 'id' plus at least two other keys; 403 when the requested
    username is already taken, 404 when the user does not exist.
    """
    data = request.json
    if len(data) <= 2 or "id" not in data:
        abort(400)
    user = User.query.filter_by(id=data["id"]).first_or_404()
    if "username" in data:
        taken = User.query.filter_by(username=data["username"]).first()
        if taken is not None:
            return {"status": "error", "message": "username has been used"}, 403
        user.username = data["username"]
    if "password" in data:
        user.password = data["password"]
        user.hash_password(user.password)
    # Remaining fields copy straight across when present.
    for field in ("birthday", "email", "province", "district", "sex", "name"):
        if field in data:
            setattr(user, field, data[field])
    db.session.commit()
    return {"status": "ok"}, 200
def get(self, item_id):
    """Return the marshalled movie with *item_id*; 404 when absent."""
    query = self.build_query().filter_by(id=item_id)
    try:
        instance = query.one()
    except NoResultFound:
        abort(404, message="Movie {} doesn't exist".format(item_id))
    return marshal(instance, fields=self.fields), 200
def find(self):
    """Pick a random not-yet-done recipe, mark it done, and return it.

    Aborts with 500 when every recipe has been done.
    """
    recipe = (session.query(Recipe)
              .filter(Recipe.done == False)
              .order_by(func.random())
              .first())
    if recipe is None:
        abort(500, message="Every recipe was done. Nice.")
    recipe.done = True
    session.commit()
    return recipe
def get_by_slug(self, track_slug):
    """Return the Track with the given slug, aborting 404 when missing."""
    match = Track.query.filter_by(slug=track_slug).first()
    if match is None:
        abort(404)
    return match
def get(self, groupname, name, terms):
    """Search for articles by title within one feed of a feed group.

    Returns a paginated list of matching articles, newest first. Aborts
    404 when the group or the feed cannot be found for this key.
    """
    key = auth()
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("per_page", type=int, help="", required=False, default=10)
    args = parser.parse_args()

    fg = FeedGroup.query.filter(
        and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
    if not fg:
        restful.abort(404)

    matching = [feed for feed in fg.feeds if feed.name == name]
    if not matching:
        abort(404)
    feed = matching[0]

    page = Article.query.filter(
        and_(Article.feed == feed,
             Article.title.like("%" + terms + "%"))
    ).order_by(desc(Article.created)).paginate(args.page, args.per_page)
    return [article.jsonify() for article in page.items]
def get_by_slug(self, album_slug):
    """Return the Album with the given slug, aborting 404 when missing."""
    match = Album.query.filter_by(slug=album_slug).first()
    if match is None:
        abort(404)
    return match
def get_one(self, track_id):
    """Return the Track with primary key *track_id*; 404 when missing."""
    match = Track.query.get(track_id)
    if match is None:
        abort(404)
    return match
def post(self, measurement_type):
    """Record a heart-rate ('rate') or blood-pressure ('pressure') measurement.

    Aborts 400 for an unknown measurement type or user. Returns the
    marshalled newly-created measurement.
    """
    if measurement_type == 'rate':
        self.reqparse.add_argument('rate', type=int, location='json', required=True)
    elif measurement_type == 'pressure':
        for field in ('pulse', 'systolic', 'diastolic'):
            self.reqparse.add_argument(field, type=int, location='json', required=True)
    else:
        return abort(400)
    args = self.reqparse.parse_args()
    user = User.query.get(args['user_id'])
    if user is None:
        return abort(400)
    if measurement_type == 'rate':
        result = FrequencyMeasurement()
        result.rate = args['rate']
    else:
        result = Measurement()
        result.pulse = args['pulse']
        result.systolic = args['systolic']
        result.diastolic = args['diastolic']
    result.user = user
    db.session.add(result)
    db.session.commit()
    return marshal(result, result.marshal_fields)
def get_one(self, album_id):
    """Return the Album with primary key *album_id*; 404 when missing."""
    match = Album.query.get(album_id)
    if match is None:
        abort(404)
    return match
def delete(self):
    """Delete the message named by the 'id' argument; 500 when missing."""
    message_id = self.args["id"]
    if not message_id:
        abort(500)
    # abort() raises, so reaching here implies a usable id.
    Message.objects.get(id=message_id).delete()
def get(self, group_id):
    """Return the users of group *group_id*; 404 when the group is absent.

    Fix: the original issued the identical Group query twice -- once for
    the existence check and again to read ``.users``. Reuse the first
    result instead.
    """
    group = Group.query.filter(Group.id == group_id).first()
    if not group:
        abort(404, message="Group {} doesn't exist".format(group_id))
    return group.users, 200
def process_input(self, deposition, draft_id=None): """ Process input data """ # If data provided, process it if request.json: if draft_id is None: # Defaults to `_default' draft id unless specified draft = deposition.get_or_create_draft( request.json.get( 'draft_id', deposition.get_default_draft_id() ) ) else: draft = deposition.get_draft(draft_id) # Process data dummy_form, validated, result = draft.process( request.json.get('metadata', {}), complete_form=True ) # Validation failed to abort if not validated: abort( 400, message="Bad request", status=400, errors=filter_draft_errors(result), ) if validated and request.json.get('completed', False): draft.complete()
def get(self, oauth, resource_id, file_id):
    """Get a deposition file; 404 when it does not exist."""
    deposition = Deposition.get(resource_id, user=current_user)
    deposition_file = deposition.get_file(file_id)
    if deposition_file is None:
        abort(404, message="File does not exist", status=404)
    return deposition.type.marshal_file(deposition_file)
def get_one(self, artist_id):
    """Return the Artist with primary key *artist_id*; 404 when missing."""
    match = Artist.query.get(artist_id)
    if match is None:
        abort(404)
    return match
def get_by_slug(self, artist_slug):
    """Return the Artist with the given slug, aborting 404 when missing."""
    match = Artist.query.filter_by(slug=artist_slug).first()
    if match is None:
        abort(404)
    return match
def get(self, user_id):
    """Return the groups user *user_id* belongs to; 404 for an unknown user."""
    user = session.query(User).filter(User.id == user_id).first()
    if user is None:
        abort(404, message="User {} doesn't exist".format(user_id))
    groups = (session.query(Group)
              .join(Group.users)
              .filter(User.id == user_id)
              .all())
    return groups, 200
def post(self, job_id):
    """Execute a control command on a job.

    Expects a JSON object with a 'command' key ('terminate' or 'kill').
    Aborts 403 when a non-admin tries to control someone else's job,
    400 for a malformed payload, an unknown command, or a job that is
    not running. Returns an empty 204 on success.
    """
    # check permissions: non-admins may only control their own jobs
    if not g.admin:
        owner = g.job_manager.get_job_owner(g.db, job_id)
        if owner != g.user_id:
            abort(403)
    # carry out request
    o = json.load(request.stream)
    if not (isinstance(o, dict) and 'command' in o):
        abort(400)
    cmd = o['command']
    # Fix: the terminate/kill branches duplicated identical error
    # handling; consolidate them into a dispatch table.
    handlers = {
        'terminate': g.job_manager.terminateJob,
        'kill': g.job_manager.killJob,
    }
    if cmd not in handlers:
        abort(400, message=('No such command: "%s"' % cmd))
    try:
        handlers[cmd](g.db, job_id)
    except KeyError:
        abort(400, message=('Job %d is not running.' % job_id))
    return '', 204
def get(self, dashboard_slug=None):
    """Return the dashboard (with widgets) for *dashboard_slug*; 404 if unknown."""
    try:
        board = models.Dashboard.get_by_slug(dashboard_slug)
    except models.Dashboard.DoesNotExist:
        abort(404)
    else:
        return board.to_dict(with_widgets=True)
def post(self, name):
    """Reject POST on this resource.

    NOTE(review): returning 404 with a "not Allowed" message is unusual;
    HTTP 405 (Method Not Allowed) is the conventional status. Confirm
    before changing, since existing clients may depend on the 404.
    """
    abort(404, message="Post not Allowed")
def parse_args(self, region=None, uri=None):
    """Parse and normalise journey query arguments.

    Applies several retro-compatibility rewrites (global max_duration_to_pt
    fan-out, data_freshness defaulting, 'vls' -> 'bss', first/last section
    modes), derives origin from *uri* when given, defaults the datetime to
    the current one, applies the traveler profile, and finally defaults
    the fallback modes to walking. Returns the normalised args dict.
    """
    args = self.parsers['get'].parse_args()
    if args.get('max_duration_to_pt') is not None:
        # retrocompatibility: max_duration_to_pt overrides every
        # per-mode value
        args['max_walking_duration_to_pt'] = args['max_duration_to_pt']
        args['max_bike_duration_to_pt'] = args['max_duration_to_pt']
        args['max_bss_duration_to_pt'] = args['max_duration_to_pt']
        args['max_car_duration_to_pt'] = args['max_duration_to_pt']
    if args['data_freshness'] is None:
        # retrocompatibility handling: derive freshness from the old
        # disruption_active flag
        args['data_freshness'] = \
            'adapted_schedule' if args['disruption_active'] is True else 'base_schedule'
    # TODO: change the protobuf so that this rewrite is no longer needed
    if args['destination_mode'] == 'vls':
        args['destination_mode'] = 'bss'
    if args['origin_mode'] == 'vls':
        args['origin_mode'] = 'bss'
    # for last and first section mode retrocompatibility
    if 'first_section_mode' in args and args['first_section_mode']:
        args['origin_mode'] = args['first_section_mode']
    if 'last_section_mode' in args and args['last_section_mode']:
        args['destination_mode'] = args['last_section_mode']
    if region:
        if uri:
            objects = uri.split('/')
            # an even-length path ends on an object id -> use it as origin
            if objects and len(objects) % 2 == 0:
                args['origin'] = objects[-1]
            else:
                abort(503, message="Unable to compute journeys "
                                   "from this object")
        # we transform the origin/destination url to add information
        if args['origin']:
            args['origin'] = transform_id(args['origin'])
        if args['destination']:
            args['destination'] = transform_id(args['destination'])
    if not args['datetime']:
        args['datetime'] = args['_current_datetime']
    args['original_datetime'] = args['datetime']
    if args.get('traveler_type'):
        traveler_profile = TravelerProfile.make_traveler_profile(
            region, args['traveler_type'])
        traveler_profile.override_params(args)
    # We set default modes for fallback modes.
    # The reason why we cannot put default values in parser_get.add_argument() is that, if we do so,
    # fallback modes will always have a value, and traveler_type will never override fallback modes.
    if args.get('origin_mode') is None:
        args['origin_mode'] = ['walking']
    if args.get('destination_mode') is None:
        args['destination_mode'] = ['walking']
    return args
def nufun(*args, **kwargs):
    """Wrapper that requires an authenticated user unless debug is on."""
    authorized = debug or current_user.authenticated()
    if not authorized:
        abort(401, message="authentication required")
    return func(*args, **kwargs)
def get(self, name):
    """Return the application called *name*, or 404 when lookup fails."""
    try:
        return g.applications.get(name)
    except Exception:
        restful.abort(404)
def get(self, host):
    """Return the configuration for *host*, or 404 when lookup fails.

    Fix: narrowed the bare ``except:`` -- which also swallowed
    SystemExit and KeyboardInterrupt -- to ``except Exception``.
    """
    try:
        return g.global_config.get_host(host)
    except Exception:
        restful.abort(404)
def put(self, name):
    """Reject PUT on this resource.

    NOTE(review): 405 (Method Not Allowed) is the conventional status for
    a disallowed verb; confirm before changing the 404 clients may expect.
    """
    abort(404, message="PUT not Allowed")
def delete(self, name):
    """Reject DELETE on this resource.

    NOTE(review): 405 (Method Not Allowed) is the conventional status for
    a disallowed verb; confirm before changing the 404 clients may expect.
    """
    abort(404, message="DELETE not Allowed")
def get(self, slug):
    """Return the post with *slug*; 404 when it does not exist."""
    match = Post.query.filter_by(slug=slug).first()
    if match is None:
        abort(404, message="Post {} doesn't exist".format(slug))
    return match
def handle_request_parsing_error(err):
    """Abort with 422 carrying the validation error messages from *err*."""
    messages = err.messages
    abort(422, errors=messages)
def abort_if_name_doesnt_exist(name):
    """Abort with 404 when *name* is not a known DNS entry."""
    if name in DNS:
        return
    abort(404, message="name {} doesn't exist".format(name))
class SlackCommand(object):
    """Parses and executes custom Slack slash-commands against the POPS API.

    A SlackCommand is built from a popsapi.models.Slack object that
    carries the allowed command and environments; run() dispatches the
    Slack payload to the matching resource.
    """

    def __init__(self, slack):
        """Validate that *slack* (a popsapi.models.Slack object, used to
        load permissions) is present; abort 401 otherwise.
        """
        self.is_valid = False
        # check if slack object exists
        if not slack:
            abort(401, message='Unauthorized')
        # auth ok
        self.is_valid = True
        self.slack = slack

    def _slack_usage(self):
        """Return the help/usage text shown to the Slack user."""
        return " Supported Custom Slack Commands:\
 getqueue: Get queues from single/multiple servers or entire environment.\
 You may use filters throught tags.\
 \
 slack usage: /getqueue [options]\
 options:\
 -environment [environment] : environment name.\
 -server [environment] : server name.\
 -tag [tag] : tag name.\
 \
 examples:\
 /getqueue\
 /getqueue -environment corporation\
 /getqueue -server mta-in.mailserver.com -server mta-out.mailserver.com\
 /getqueue -environment corporation -tag inbound\
 /getqueue -tag outbound -tag inbound\
 \
 gettasks: Get tasks from single/multiple servers or entire environment. You may \
 apply filters in your search.\
 \
 slack usage: /gettasks [options]\
 options:\
 -environment [environment] : environment name.\
 -server [environment] : server name.\
 -status [pending|completed|error] : task status.\
 -action %s : task action.\
 examples:\
 /gettasks\
 /gettasks -environment corporation -status pending\
 /gettasks -server mta-in.mailserver.com\
 /gettasks -action purge\
 /gettasks -environment -action purge\
 " % cfg['slack']['actions']

    def run(self, cmd, args, method):
        """Parse the command and arguments from Slack and use the POPS
        API to retrieve information or create tasks.

        Args:
            cmd (str): 'command' requested ('command' payload or url arg).
            args (str): command arguments ('text' payload or url arg).
            method (str): HTTP verb on the backing resource ('get'/'post').
        """
        if not self.is_valid:
            logger.error('slack object not valid is_valid = %s' % self.is_valid)
            abort(401, message='Unauthorized')
        # check if slack object has sufficient permission to run the command.
        logger.info("trying:[%s == %s] token[%s]" % (cmd, self.slack.command, self.slack.token))
        if not cmd == self.slack.command:
            logger.error('Unauthorized: command allowed in slack object: %s' % cmd)
            abort(401, message='Unauthorized')
        # Parsing arguments
        reqdata = {}
        parser = argparse.ArgumentParser(conflict_handler='resolve')
        # common arguments
        parser.add_argument('-environment', action='append')
        parser.add_argument('-server', action='append')
        if cmd == '/getqueue':
            # NOTE(review): the usage text documents '-tag' but the option
            # is registered as '--tag'; confirm which spelling Slack sends.
            parser.add_argument('--tag', action='append')
            resource = QueueResource()
        elif cmd == '/gettasks':
            parser.add_argument('-action', action='append')
            parser.add_argument('-status', action='append')
            resource = TaskResource()
        elif cmd == '/createtask':
            parser.add_argument('-action', required=True)
            parser.add_argument('-user', required=True)
            resource = TaskResource()
        else:
            logger.error('command not supported: %s' % cmd)
            abort(400, message='command not supported %s' % cmd)
        try:
            argument = parser.parse_args(args.split())
        except Exception, e:
            # On bad arguments, reply with the usage text instead of erroring.
            logger.error('something went wrong: %s' % e)
            return self._slack_usage()
        # define environments to lookup.
        environments = []
        all_rule = False
        # all environments allowed
        if 'all' in self.slack.environment:
            for envobj in Environment.objects.only('name'):
                environments.append(str(envobj.name))
            all_rule = True
        # when not specified, take the ones in the slack object.
        if not argument.environment:
            environments = self.slack.environment
        # only lookup the environments specified.
        else:
            environments = argument.environment
        for environment in environments:
            # check if command can be performed in the current environment scope.
            if environment not in self.slack.environment and all_rule == False:
                logger.error('Forbidden. Environment not allowed: %s' % environment)
                abort(403, message='Forbidden: Environment not allowed %s' % environment)
            # The argparse Namespace only has the attributes registered for
            # this command; missing ones raise AttributeError and are skipped.
            # add server
            try:
                reqdata['server'] = argument.server
            except:
                pass
            # add tag
            try:
                reqdata['tag'] = argument.tag
            except:
                pass
            # add action
            try:
                reqdata['action'] = argument.action
            except:
                pass
            # add status
            try:
                reqdata['status'] = argument.status
            except:
                pass
            # add destination
            try:
                reqdata['destination'] = argument.user
            except:
                pass
            if method == 'get':
                response = resource.get(environment, reqdata)
            if method == 'post':
                response = resource.post(environment, reqdata)
        return response
print "token:%s\nteam_id:%s\nchannel_id:%s\ncommand:%s\ntext:%s\n" % (reqdata['token'], reqdata['team_id'], reqdata['channel_id'], reqdata['command'], reqdata['text']) if slack: try: slackcmd = SlackCommand(slack) res = slackcmd.run(reqdata['command'], reqdata['text'], 'get') except Exception, e: logger.error('something went wrong trying to execute slack command: %s' % e) return { 'response' : 'Internal Server Error' }, 500 return res, 200 else: abort(401, message='Unauthorized: insufficient privileges') def post(self): self.parser.add_argument('token', type=str, required=True) self.parser.add_argument('team_id', type=str) self.parser.add_argument('team_domain', type=str) self.parser.add_argument('channel_id', type=str) self.parser.add_argument('channel_name', type=str) self.parser.add_argument('user_id', type=str) self.parser.add_argument('user_name', type=str) self.parser.add_argument('command', type=str) self.parser.add_argument('text', type=str) reqdata = self.parser.parse_args() # # validation
def abort_if_edge_doesnt_exist(edge_id):
    """Abort with 404 when *edge_id* is not a known edge."""
    if edge_id in edges:
        return
    abort(404, message="Edge {} doesn't exist".format(edge_id))
def get(self, id):
    """Return the event with the given id; 404 when absent."""
    match = session.query(Event).filter(Event.id == id).first()
    if match is None:
        abort(404, message="Event {} doesn't exist".format(id))
    return match
def get(self, group_id):
    """Return the group with *group_id* as a dict; 404 when lookup fails.

    Fix: narrowed the bare ``except:`` -- which also swallowed
    SystemExit and KeyboardInterrupt -- to ``except Exception``.
    """
    try:
        group = self.get_group(group_id)
    except Exception:
        abort(404, message="group not found")
    return {'status': 'ok', 'group': group.to_dict()}
def abort_if_not_fully_connected(edge_distances):
    """Abort with 404 when the distance table contains an unreachable pair.

    A -1 entry in edge_distances['value'] marks a node pair with no path.
    Fix: corrected the grammar of the user-facing message ("The graphs is
    not Fully Connected" -> "The graph is not fully connected").
    """
    if -1 in edge_distances['value']:
        abort(404, message="The graph is not fully connected")
def init_deep_target(target_path, depth):
    """Recursively walk one identifier of *target_path*, updating the
    target document/list/serializer state on the enclosing resource.

    NOTE: closes over ``self`` from the enclosing method; *target_path*
    is consumed (popped) one identifier per recursion level.
    """
    identifier = target_path[0]
    depth += 1
    if self.target_list:
        # Current target is a list: the identifier selects an item by
        # the serializer's identifier field.
        identifier_field = None
        self.target_serializer = (
            self.target_serializer.sub_field.sub_serializer)
        for fieldname, field in (
                self.target_serializer._fields().items()):
            if field.identifier:
                identifier_field = fieldname
                # Coerce the raw URL identifier into the field's type.
                identifier = field.deserialize(identifier)
        if identifier_field:
            for i, document in enumerate(self.target_list):
                if getattr(document, identifier_field) == identifier:
                    self.target_parent_document = None
                    self.target_parent_list = self.target_list
                    self.target_document = self.target_list[i]
                    self.target_list = None
                    self.target_document_obj = (
                        self.target_document_obj.field.
                        document_type)
                    break
            if not self.target_document:
                # PUT on a missing final path item means "create it here".
                if request.method == 'PUT' and len(
                        target_path) == 1:
                    self.create = target_path[0]
                    self.create_identifier_field = identifier_field
                    self.target_document_obj = self.target_document_obj.field.document_type
                else:
                    abort(
                        404, message=(
                            "The resource specified with identifier '{}' could not be "
                            "found".format(identifier)))
        else:
            # No identifier field on the serializer: items can't be addressed.
            abort(
                404, message=(
                    "The resource specified with identifier '{}' could not be "
                    "found".format(identifier)))
    else:
        # Current target is a document: the identifier names a field.
        try:
            self.target_serializer = getattr(
                self.target_serializer, identifier)
        except AttributeError:
            abort(
                404, message=(
                    "The resource specified with identifier '{}' could not be "
                    "found".format(identifier)))
        if isinstance(self.target_serializer,
                      serializer_fields.DocumentField):
            self.target_serializer = self.target_serializer.sub_serializer
        self.target_document_obj = getattr(
            self.target_document_obj, identifier)
        if isinstance(self.target_document_obj,
                      fields.EmbeddedDocumentField):
            self.target_document_obj = self.target_document_obj.document_type
        self.target_parent_document = self.target_document
        self.target_parent_list = None
        self.target_list = None
        self.target_document = getattr(self.target_document, identifier)
        # A list field shifts the target from a document to a list.
        if isinstance(self.target_document_obj, fields.ListField):
            self.target_list = self.target_document
            self.target_document = None
    target_path.pop(0)
    if target_path:
        init_deep_target(target_path, depth)
def abort_if_node_doesnt_exist(node_id):
    """Abort with 404 when *node_id* is not a known node."""
    if node_id in nodes:
        return
    abort(404, message="Node {} doesn't exist".format(node_id))
def _init_target(self):
    """Initiate the target document and target serializer.

    Resolves ``self.target_path`` step by step, starting from the base
    document/list, and leaves the resolved state in the target_* fields
    (target_document, target_list, target_serializer, target_parent_*).
    ``self.create`` is set to the new id when a PUT addresses a missing
    resource at the end of the path.
    """
    self.target_document_obj = self.document
    # Work on a copy: the path is consumed while walking.
    target_path = self.target_path[:]
    self.target_parent = None
    self.target_document = None
    self.is_base_document = True
    self.target_serializer = self.serializer
    self.base_document = self.get_base_document()
    if self.base_document:
        self.target_list = None
        self.target_document = self.base_document
    else:
        self.target_list = self.get_base_list()
    # Determines if the document should be created. If not `False`,
    # contains the value for the id for the new document.
    self.create = False
    if target_path:
        if not self.base_document:
            identifier = target_path[0]
            try:
                self.base_document = self.get_base_document_by_identifier(
                    identifier)
            except DoesNotExist:
                if request.method == 'PUT':
                    # If method is PUT, it should create the resource
                    # at this location.
                    self.create = identifier
                else:
                    abort(
                        404, message=(
                            "The resource specified with identifier '{}' "
                            "could not be found".format(identifier)))
            except ValidationError:
                abort(
                    400, message=(
                        "The formatting for the identifier '{}' is invalid"
                        .format(identifier)))
            self.target_document = self.base_document
            self.target_list = None
            target_path.pop(0)
        if target_path:
            # The path goes deeper than the base document.
            self.is_base_document = False
            self.create = False

            def init_deep_target(target_path, depth):
                """Recursively resolve one path identifier per call,
                updating the target_* state on ``self``.
                """
                identifier = target_path[0]
                depth += 1
                if self.target_list:
                    # Target is a list: select an item by identifier field.
                    identifier_field = None
                    self.target_serializer = (
                        self.target_serializer.sub_field.sub_serializer)
                    for fieldname, field in (
                            self.target_serializer._fields().items()):
                        if field.identifier:
                            identifier_field = fieldname
                            identifier = field.deserialize(identifier)
                    if identifier_field:
                        for i, document in enumerate(self.target_list):
                            if getattr(document, identifier_field) == identifier:
                                self.target_parent_document = None
                                self.target_parent_list = self.target_list
                                self.target_document = self.target_list[i]
                                self.target_list = None
                                self.target_document_obj = (
                                    self.target_document_obj.field.
                                    document_type)
                                break
                        if not self.target_document:
                            # PUT on a missing final item means create it.
                            if request.method == 'PUT' and len(
                                    target_path) == 1:
                                self.create = target_path[0]
                                self.create_identifier_field = identifier_field
                                self.target_document_obj = self.target_document_obj.field.document_type
                            else:
                                abort(
                                    404, message=(
                                        "The resource specified with identifier '{}' could not be "
                                        "found".format(identifier)))
                    else:
                        abort(
                            404, message=(
                                "The resource specified with identifier '{}' could not be "
                                "found".format(identifier)))
                else:
                    # Target is a document: the identifier names a field.
                    try:
                        self.target_serializer = getattr(
                            self.target_serializer, identifier)
                    except AttributeError:
                        abort(
                            404, message=(
                                "The resource specified with identifier '{}' could not be "
                                "found".format(identifier)))
                    if isinstance(self.target_serializer,
                                  serializer_fields.DocumentField):
                        self.target_serializer = self.target_serializer.sub_serializer
                    self.target_document_obj = getattr(
                        self.target_document_obj, identifier)
                    if isinstance(self.target_document_obj,
                                  fields.EmbeddedDocumentField):
                        self.target_document_obj = self.target_document_obj.document_type
                    self.target_parent_document = self.target_document
                    self.target_parent_list = None
                    self.target_list = None
                    self.target_document = getattr(self.target_document, identifier)
                    if isinstance(self.target_document_obj, fields.ListField):
                        self.target_list = self.target_document
                        self.target_document = None
                target_path.pop(0)
                if target_path:
                    init_deep_target(target_path, depth)

            init_deep_target(target_path, 0)
def post(self, *args, **kwargs):
    """
    Processes a HTTP POST request.

    Expects a JSON object that matches the serializer's fields or
    a JSON array of these objects.

    Returns the object/objects that was/were created, serialized
    into JSON format by the serializer, with status 201. Aborts 405
    when the target path addresses an item rather than a list.
    """
    if self.target_list is None:
        # If the target is not at a list, then it's at an item, and
        # you can't update items with POST.
        abort(405, message=("Can't update an item with POST, use PUT instead."))
    request_data = self._request_data()
    if isinstance(request_data, list):
        # Process multiple documents
        response = []
        if self.is_base_document:
            documents = []
            for item in request_data:
                documents.append(self._process_document(item))
            # If we come here, it means `_process_document()` didn't
            # `abort()`, so the request data was not malformed, so we
            # can save the documents now.
            for document in documents:
                self._save_document(document)
                response.append(self.target_serializer.serialize(document))
        else:
            # Appending to an embedded list: build the new sub-documents
            # and save the base document once.
            new_documents = []
            for item in (self.target_serializer.deserialize(request_data)):
                document = self.target_document_obj.field.document_type(
                    **item)
                new_documents.append(document)
                self.target_list.append(document)
            self._save_document(self.base_document)
            response = self.target_serializer.serialize(new_documents)
    else:
        if self.is_base_document:
            document = self._process_document(request_data)
            self._save_document(document)
            response = self.target_serializer.serialize(document)
        else:
            # Single new item in an embedded list.
            document = self.target_document_obj.field.document_type(
                **self.target_serializer.sub_field.deserialize(
                    request_data))
            self.target_list.append(document)
            self._save_document(self.base_document)
            response = self.target_serializer.sub_field.serialize(document)
    return self.make_response(response, 201)
def safe_send(x, fail_message):
    """Return *x*, aborting with 404 and *fail_message* on exception.

    NOTE(review): as written the try/except can never fire -- *x* is
    already evaluated by the caller before this function runs, so any
    exception is raised at the call site. The guard only helps if
    callers pass a lazy value; confirm the intent (it probably should
    accept and invoke a callable).
    """
    try:
        return x
    except Exception as e:
        print(e)
        return abort(404, message=fail_message)
class MongoEngineResource(Resource):
    """
    Base Flask-RESTful resource exposing a MongoEngine document.

    Subclasses provide `document` and `serializer` attributes; the
    hooks below (`authenticate`, `get_base_document`, `get_base_list`,
    ...) can be overwritten to customize behavior.
    """

    # The name of this resource
    name = None

    # The description of this resource
    description = None

    # Information about the URL params on this resource
    params_info = None

    # The amount of items on one page of the listview of a document
    items_per_page = 100

    # The query param used for paging
    page_number_query_param = 'page'

    # The content type this resource accepts
    accepted_content_type = 'application/json'

    # The headers that should be included in the response
    # NOTE(review): this is a class-level dict that `make_response()`
    # mutates, so header values persist across requests and are shared
    # between subclasses — confirm this is intended.
    accepted_charset = 'charset=utf-8'

    headers = {}

    # The key for the identifier field in case a document needs to be
    # created.
    create_identifier_field = 'id'

    def __init__(self, *args, **kwargs):
        # Instantiate the serializer (subclasses set `serializer` to a
        # class; it is replaced here by an instance of that class).
        self.serializer = self.serializer()

        # Default the resource name to the class name.
        if not self.name:
            self.name = self.__class__.__name__

        # A list of reserved query params. These params can't be used
        # for filters.
        self.reserved_query_params = [self.page_number_query_param]

        super(MongoEngineResource, self).__init__(*args, **kwargs)

    def html_output(self, data):
        """
        Returns a nice looking HTML resource page.

        This is handy for developers that will implement the resource.
        """
        template_path = '{}'.format(
            os.path.join(
                os.path.dirname(__file__), 'templates', 'resource.html'))

        with open(template_path) as f:
            template = f.read()

        context = {
            'name': self.name,
            'docs_url': '{}!!'.format(self.get_base_url()),
            'data': json.dumps(data, indent=4)
        }

        return render_template_string(template, **context)

    def dispatch_request(self, *args, **kwargs):
        # The special single-segment path '!!' serves the HTML docs;
        # every other request is authenticated, content-type checked,
        # and resolved to a target before normal dispatch.
        self.init_target_path(*args, **kwargs)

        if len(self.target_path) == 1 and self.target_path[0] == '!!':
            # NOTE(review): `html_doc` is not defined in this chunk —
            # verify it exists (or whether `html_output` was intended).
            return self.html_doc()
        else:
            self.authenticate()
            self.check_request_content_type_header()
            self._init_target()
            return super(MongoEngineResource, self).dispatch_request(
                *args, **kwargs)

    def init_target_path(self, *args, **kwargs):
        # Split the optional 'path' URL kwarg into its segments.
        self.target_path = []

        if 'path' in kwargs:
            self.target_path = kwargs['path'].split('/')

            # the last item is usually an empty entry (because it splits
            # on the last slash too) so if it's empty, pop it.
            if self.target_path and not self.target_path[-1]:
                self.target_path.pop()

    def make_response(self, data, status_code=200, extra_headers={}):
        """
        Returns the response parameters based on the parameters given.
        Will update the `self.headers` dict with the `extra_headers`.
        """
        # NOTE(review): `extra_headers={}` is a mutable default argument
        # and `self.headers` is a shared class-level dict — extra
        # headers accumulate across requests.
        self.headers.update(extra_headers)

        if 'text/html' in dict(request.accept_mimetypes).keys():
            # Browser clients get the HTML developer page instead of JSON.
            return make_response(self.html_output(data))
        else:
            return data, status_code, self.headers

    def authenticate(self):
        """
        Placeholder for authenticating requests.

        If you want custom authentication, overwrite this method. If
        authentication fails, you should call `abort()` yourself (or
        something else you like).
        """
        return

    def check_request_content_type_header(self):
        """
        Checks if the content type header in the request is correct.
        """
        # NOTE(review): raises KeyError (-> 500) when the request has no
        # Content-Type header at all — confirm that is acceptable.
        content_type = request.headers['content-type'].split(';')

        self.check_request_content_type(content_type[0].strip())

        if len(content_type) == 2:
            self.check_request_charset(content_type[1].strip())

    def check_request_content_type(self, content_type):
        """
        Checks if the content type (without the charset) in the request
        is correct.
        """
        # Only POST and PUT carry a request body, so only they are checked.
        if (request.method in ('POST', 'PUT') and
                content_type != self.accepted_content_type):
            abort(
                415,
                message=(
                    "Invalid Content-Type header '{}'. This resource "
                    "only supports 'application/json'.".format(content_type)))

    def check_request_charset(self, charset):
        """
        Checks if the charset in the request is correct.
        """
        if charset != self.accepted_charset:
            abort(
                415,
                message=
                ("Invalid charset in Content-Type header '{}'. This resource "
                 "only supports 'charset=utf-8'.".format(charset)))

    def request_headers(self):
        """
        Returns a dict of request headers that are accepted by this
        resource.
        """
        return {
            'Content-Type': {
                'description':
                ("The media type of the body.\n"
                 "\n"
                 "Specification:\n"
                 "\n"
                 "http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.17"
                 ),
                'required': True,
            }
        }

    def response_headers(self):
        """
        Returns a dict describing the response headers this resource
        may emit.
        """
        return {
            'Link': {
                'description':
                ("The pagination links. Only returned on GET requests.\n"
                 "\n"
                 "Specification:\n"
                 "\n"
                 "http://tools.ietf.org/html/rfc5988#section-5"
                 "\n"
                 "This implementation is inspired by the pagination \n"
                 "implementation of GitHub. See their docs for more info:\n"
                 "\n"
                 "https://developer.github.com/v3/#pagination"),
            }
        }

    def _init_target(self):
        """
        Initiates the target document and target serializer.

        Walks `self.target_path` from the base document/list down to
        the addressed (possibly embedded) document or list, keeping
        document and serializer in sync at every step. Aborts with 404
        on unknown identifiers (except for PUT, where a missing leaf
        marks the document for creation via `self.create`).
        """
        self.target_document_obj = self.document
        target_path = self.target_path[:]
        self.target_parent = None
        self.target_document = None
        self.is_base_document = True
        self.target_serializer = self.serializer
        self.base_document = self.get_base_document()

        if self.base_document:
            self.target_list = None
            self.target_document = self.base_document
        else:
            self.target_list = self.get_base_list()

        # Determines if the document should be created. If not `False`,
        # contains the value for the id for the new document.
        self.create = False

        if target_path:

            if not self.base_document:

                identifier = target_path[0]

                try:
                    self.base_document = self.get_base_document_by_identifier(
                        identifier)
                except DoesNotExist:

                    if request.method == 'PUT':
                        # If method is PUT, it should create the resource
                        # at this location.
                        self.create = identifier
                    else:
                        abort(
                            404,
                            message=(
                                "The resource specified with identifier '{}' "
                                "could not be found".format(identifier)))

                except ValidationError:
                    abort(
                        400,
                        message=(
                            "The formatting for the identifier '{}' is invalid"
                            .format(identifier)))

                self.target_document = self.base_document
                self.target_list = None

                target_path.pop(0)

            if target_path:

                self.is_base_document = False
                self.create = False

                def init_deep_target(target_path, depth):
                    # Resolve one path segment, then recurse on the rest.
                    identifier = target_path[0]
                    depth += 1

                    if self.target_list:
                        # Current target is a list: the segment selects
                        # an item by its identifier field.
                        identifier_field = None

                        self.target_serializer = (
                            self.target_serializer.sub_field.sub_serializer)

                        for fieldname, field in (
                                self.target_serializer._fields().items()):

                            if field.identifier:
                                identifier_field = fieldname
                                identifier = field.deserialize(identifier)

                        if identifier_field:

                            for i, document in enumerate(self.target_list):

                                if getattr(document, identifier_field) == identifier:
                                    self.target_parent_document = None
                                    self.target_parent_list = self.target_list
                                    self.target_document = self.target_list[i]
                                    self.target_list = None
                                    self.target_document_obj = (
                                        self.target_document_obj.field.
                                        document_type)
                                    break

                            if not self.target_document:

                                # A missing item on a single-segment PUT
                                # means "create it here".
                                if request.method == 'PUT' and len(
                                        target_path) == 1:
                                    self.create = target_path[0]
                                    self.create_identifier_field = identifier_field
                                    self.target_document_obj = self.target_document_obj.field.document_type
                                else:
                                    abort(
                                        404,
                                        message=
                                        ("The resource specified with identifier '{}' could not be "
                                         "found".format(identifier)))

                        else:
                            abort(
                                404,
                                message=
                                ("The resource specified with identifier '{}' could not be "
                                 "found".format(identifier)))

                    else:
                        # Current target is a document: the segment names
                        # one of its fields.
                        try:
                            self.target_serializer = getattr(
                                self.target_serializer, identifier)
                        except AttributeError:
                            abort(
                                404,
                                message=
                                ("The resource specified with identifier '{}' could not be "
                                 "found".format(identifier)))

                        if isinstance(self.target_serializer,
                                      serializer_fields.DocumentField):
                            self.target_serializer = self.target_serializer.sub_serializer

                        self.target_document_obj = getattr(
                            self.target_document_obj, identifier)

                        if isinstance(self.target_document_obj,
                                      fields.EmbeddedDocumentField):
                            self.target_document_obj = self.target_document_obj.document_type

                        self.target_parent_document = self.target_document
                        self.target_parent_list = None
                        self.target_list = None
                        self.target_document = getattr(self.target_document, identifier)

                    # A list field becomes the new target list for the
                    # next segment.
                    if isinstance(self.target_document_obj, fields.ListField):
                        self.target_list = self.target_document
                        self.target_document = None

                    target_path.pop(0)

                    if target_path:
                        init_deep_target(target_path, depth)

                init_deep_target(target_path, 0)

    def get_base_document(self):
        """
        Returns the base document.

        By default this is `None` because by default the base resource
        returns a list of documents (returned by `get_base_list`).
        However, if this method returns a document, the base resource
        returns this document instead of a list.
        """
        return None

    def get_base_document_by_identifier(self, identifier):
        """
        Returns the base document that matches the provided `identifier`.

        By default matches on the `id` field of the document. You can
        overwrite this method if you want to alter this behavior.

        This method is allowed to throw these MongoEngine exceptions:
        - DoesNotExist
        - ValidationError
        These will be catched and handled correctly.
        """
        return self.target_document_obj.objects.get(id=identifier)

    def get_base_list(self):
        """
        Returns the base list of documents.

        By default it will return all the documents that are associated
        to the document Class `self.target_document`. You can overwrite
        this method to limit the base documents exposed in this
        resource.
        """
        return self.document.objects

    def _apply_paging(self, documents):
        """
        Applies paging to the provided `documents` and adds related
        headers to the response object.

        Paging is based on the value of the param of the name
        `self.page_number_query_param` if it is given, else defaults to
        the first page. If this param contains an invalid or an out of
        range value, will abort with a 400 or a 404 respectively.
        """

        def get_total_pages(documents):
            """
            Returns the total amount of pages.
            """
            # NOTE(review): this module uses Python 2 syntax elsewhere;
            # under Python 2, `count() / items_per_page` is integer
            # division, so `ceil` never rounds up and a partial last
            # page is lost — confirm and consider float division.
            total_pages = int(ceil(documents.count() / self.items_per_page))

            # Even if there are no documents, there should be at least one page
            if total_pages == 0:
                total_pages = 1

            return total_pages

        total_pages = get_total_pages(documents)

        try:
            page = self._get_page(total_pages)
        except InvalidPageParamFormat, error:
            abort(400, message="Invalid page '{}'".format(error.param))
        except PageOutOfRange, error:
            abort(404, message="Page '{}' is out of range".format(error.param))
def delete(self, id=None):
    """
    Delete the user identified by *id* via the parent resource.

    The 'me' alias is rejected with 405 Method Not Allowed.
    NOTE(review): presumably 'me' resolves to the authenticated user
    elsewhere, so allowing it here would permit self-deletion — confirm.
    """
    if id != 'me':
        return super(UserResource, self).delete(id)
    abort(HTTP.METHOD_NOT_ALLOWED)
message = ("There is no field '{}'{} on this resource.".format( error.fieldname, parent_traceback(error.parents))) abort(400, message=message) except ValueInvalidType, error: message = ( "The value for field '{}'{} is of type '{}' but should be of " "type '{}'.".format(error.field.name, parent_traceback(error.parents), json_type(error.value), json_type(error.field.deserialize_type))) abort(400, message=message) except ValueInvalidFormat, error: message = ("The value '{}' for field '{}'{} could not be parsed. " "Note that it should be in {} format.".format( error.value, error.field.name, parent_traceback(error.parents), error.format_name)) abort(400, message=message) except DataInvalidType, error: if error.parents: parents = list(error.parents)
class TrackResource(Resource):
    """
    The resource responsible for tracks.
    """
    db_model = Track

    def get_resource_fields(self):
        """Return the marshalling spec used to serialize a Track."""
        return {
            'id': fields.String(attribute='pk'),
            'uri': InstanceURI('tracks'),
            'files': TrackFiles,
            'bitrate': fields.Integer,
            'length': fields.Integer,
            'title': fields.String,
            'slug': fields.String,
            'artists': ManyToManyField(Artist, {
                'id': fields.String(attribute='pk'),
                'uri': InstanceURI('artists'),
            }),
            'albums': ManyToManyField(Album, {
                'id': fields.String(attribute='pk'),
                'uri': InstanceURI('albums'),
            }),
            'ordinal': fields.Integer,
        }

    def post(self):
        """
        Create a track from an uploaded file.

        Expects a 'track' file in the multipart body; optional form
        fields: title, artist_id (repeatable), album_id (repeatable),
        ordinal. Returns the marshalled track, 201, and a Location
        header. Aborts with 400 when no file is given and 409 when the
        track already exists.
        """
        params = {
            'title': request.form.get('title', '').strip(),
            'artists': request.form.getlist('artist_id'),
            'albums': request.form.getlist('album_id'),
            'ordinal': request.form.get('ordinal'),
        }
        if 'track' not in request.files:
            abort(HTTP.BAD_REQUEST)
        try:
            track = self.create(**params)
        except (IntegrityError, ObjectExistsError):
            abort(HTTP.CONFLICT)
        response = marshal(track, self.get_resource_fields())
        headers = {'Location': url_for('tracks', id=track.pk)}
        return response, 201, headers

    def create(self, title, artists, albums, ordinal):
        """
        Persist the uploaded file as a Track and link artists/albums.

        Aborts with 415 on an unsupported file type and 400 when any
        given artist/album id is malformed. Returns the new Track.
        """
        UploadHandler = app.config.get('UPLOAD_HANDLER')
        # `as` form is valid on both Python 2.6+ and Python 3
        # (the old `except X, e:` form is Python-2-only).
        except_ = None  # placeholder removed below
        try:
            handler = UploadHandler(track=request.files.get('track'))
        except InvalidFileTypeError:
            abort(HTTP.UNSUPPORTED_MEDIA_TYPE)

        handler.save()

        hash_file = parse_bool(request.args.get('hash_file', True))
        no_metadata = parse_bool(request.args.get('no_metadata', False))

        track = Track(path=handler.path, hash_file=hash_file,
                      no_metadata=no_metadata)
        db.session.add(track)

        # If an artist (or album) is given as argument, it will take precedence
        # over whatever the file's metadata say.
        artist_list = []
        if artists:
            try:
                artist_list.extend(get_list(Artist, artists))
            except ValueError:
                abort(HTTP.BAD_REQUEST)
        else:
            if handler.artist:
                artist_list.append(get_by_name(Artist, handler.artist))

        album_list = []
        if albums:
            try:
                album_list.extend(get_list(Album, albums))
            except ValueError:
                abort(HTTP.BAD_REQUEST)
        else:
            if handler.album:
                # BUG FIX: this used to append the metadata album to
                # `artist_list`, so the album was attached as an artist
                # and never linked as an album.
                album_list.append(get_by_name(Album, handler.album))

        for artist in artist_list:
            db.session.add(artist)
            artist.tracks.append(track)

        for album in album_list:
            db.session.add(album)
            album.tracks.append(track)

        db.session.commit()

        return track
class TokenAuthentication():
    """
    Token-based authentication helpers (static methods only).

    NOTE(review): `_autenticate` is misspelled ('authenticate') — kept
    because external callers may reference the current name.
    """

    app_name = 'cttv-rest-api'
    EXPIRED = 'expired'

    @staticmethod
    def _autenticate(auth_data):
        # Authenticate an app by secret + app_name against the keys
        # stored in the 'redis-user' extension, optionally restricted
        # to a '|'-separated domain whitelist ('*' prefix = suffix match).
        if auth_data['secret'] and auth_data['app_name']:
            auth_key = AuthKey(**auth_data)

            if current_app.extensions['redis-user'].exists(auth_key.get_key()):
                # Populate the key object from the stored data
                # (return value intentionally unused here).
                auth_key.get_loaded_data()
                domain = get_domain()

                if auth_key.domain:
                    for allowed_domain in auth_key.domain.split('|'):
                        # NOTE(review): an empty segment (e.g. 'a||b')
                        # makes `allowed_domain[0]` raise IndexError —
                        # confirm inputs can never contain one.
                        if allowed_domain[0] == '*':
                            # Wildcard entry: match on domain suffix.
                            if domain.endswith(allowed_domain[1:]):
                                return True
                        else:
                            if domain == allowed_domain:
                                return True
                else:
                    # No domain restriction stored: any domain is allowed.
                    return True

        return False

    @staticmethod
    def _prepare_payload(api_name, auth_data):
        # Build the token payload; 'uid' is only included when present
        # in the incoming auth data.
        payload = {
            'api_name': api_name,
            'app_name': auth_data['app_name'],
            'secret': auth_data['secret'],
            'domain': get_domain()
        }
        if 'uid' in auth_data:
            payload['uid'] = auth_data['uid']
        return payload

    @staticmethod
    def get_payload_from_token(token):
        # Decode and decrypt a signed token into its JSON payload.
        # Raises TokenExpired for expired signatures (outside the grace
        # period) and aborts with 401 on a bad signature.
        s = Serializer(current_app.config['SECRET_KEY'])
        # AES key is the first 16 bytes of the secret key.
        cipher = AESCipher(current_app.config['SECRET_KEY'][:16])
        try:
            data = json.loads(cipher.decrypt(s.loads(token)))
            return data
        except SignatureExpired, se:
            # NOTE(review): Python-2-only `except X, e` syntax, and
            # naive `datetime.now()` is compared against the signature
            # date — verify both are local-time/naive consistently.
            time_offset = (datetime.now() - se.date_signed).total_seconds()
            current_app.logger.error(
                'token expired: %s. signature date %s. offset with current date = %s'
                % (se.message, str(se.date_signed), str(time_offset)))
            current_app.logger.error(
                'current date %s, token date %s' %
                (str(datetime.now()), str(se.date_signed)))
            if -1 <= time_offset < 0:
                #allow for 1 seconds out of sync machines
                current_app.logger.info(
                    'token time offset within grace period. allowing auth')
                return json.loads(cipher.decrypt(se.payload))
            else:
                LogApiTokenExpired()
                # raise SignatureExpired(se)
                raise TokenExpired()
                # abort(419, message = 'Authentication expired.')
        except BadSignature, e:
            current_app.logger.error('bad signature in token')
            encoded_payload = e.payload
            if encoded_payload is not None:
                # Best effort: log the (decrypted) payload of the
                # invalid token for diagnostics before rejecting.
                try:
                    decoded_payload = s.load_payload(encoded_payload)
                    payload = json.loads(cipher.decrypt(decoded_payload))
                    LogApiTokenInvalid(payload)
                except BadData:
                    LogApiTokenInvalid(
                        dict(error='bad data in token', token=token))
            abort(401, message='bad signature in token')
def abort_if_todo_doesnt_exist(todo_id):
    """Abort the request with 404 when *todo_id* is not in TODOS."""
    if todo_id in TODOS:
        return
    abort(404, message="Todo {} doesn't exist".format(todo_id))
def create(self, title, artists, albums, ordinal):
    """
    Instantiate the configured upload handler for the posted 'track'
    file, aborting with 415 on an unsupported file type.

    NOTE(review): this appears to be a truncated duplicate of the full
    `TrackResource.create` implementation elsewhere in the file — the
    save/link/commit steps are missing here; confirm which copy is live.
    """
    UploadHandler = app.config.get('UPLOAD_HANDLER')
    try:
        handler = UploadHandler(track=request.files.get('track'))
    except InvalidFileTypeError, e:
        # Python-2-only except syntax; `e` is unused.
        abort(HTTP.UNSUPPORTED_MEDIA_TYPE)
def get(self, query_id):
    """
    Return the query with *query_id* as a dict (including its
    visualizations), or abort with 404 when it is falsy.

    NOTE(review): depending on the ORM, `.get()` may raise instead of
    returning None on a miss — confirm the falsy check is reachable.
    """
    query = models.Query.get(models.Query.id == query_id)
    if not query:
        abort(404, message="Query not found.")
    return query.to_dict(with_visualizations=True)
def get_user(self, username):
    """Return the User matching *username*, aborting 404 when absent."""
    user = User.query.filter_by(username=username).first()
    if user:
        return user
    abort(404, message="User {} doesn't exist".format(username))