def wrapper(*args, **kwargs):
    """Allow the wrapped view only for authenticated administrators.

    Aborts with 401 when the caller is not logged in and 403 when the
    caller is logged in but is not an administrator.
    """
    # BUG FIX: removed stray debug print(current_user) — it leaked user
    # details to stdout on every request.
    if not current_user.is_authenticated:
        return abort(401)
    if not current_user.is_administrator:
        return abort(403)
    return func(*args, **kwargs)
def get(self, record_id, tag_name):
    """Reject GET: reading a single tag is not supported here.

    :param record_id: the identifier of the record
    :param tag_name: the name of the tag
    """
    # 405 Method Not Allowed
    abort(405)
def options(self, record_id, tag_name):
    """Reject OPTIONS: not supported for this tag resource.

    :param record_id: the identifier of the record
    :param tag_name: the name of the tag
    """
    # 405 Method Not Allowed
    abort(405)
def authenticate(self):
    """ Verifies user's token and his/her accessibility to requested resources.
    Once token is validated, the role of user (flask.g.is_admin) and his/her
    scope (flask.g.org_uuid_list) is set up for current request.
    Raises Unauthorized when token is missing, invalid, expired or not signed by UAA
    Raises Forbidden: when org guid is missing, invalid or user can't access this org
    """
    # Lazily fetch the UAA public key used to verify token signatures.
    if not self._uaa_public_key:
        self._get_token_verification_key()
    # Paths whitelisted in auth_exceptions (e.g. health checks) skip auth
    # entirely — substring match against the request path.
    if any(exc in str(flask.request.path) for exc in self.auth_exceptions):
        return
    try:
        token = self._get_token_from_request()
        token_payload = self._parse_auth_token(token)
    except (_MissingAuthToken, jwt.InvalidTokenError) as ex:
        # Missing/invalid/expired token -> 401 Unauthorized.
        # NOTE(review): logger.warn is deprecated in favor of logger.warning.
        self._log.warn(str(ex))
        abort(401)
    # Admin flag first; org scope lookup may depend on it.
    flask.g.is_admin = self._is_admin(token_payload)
    try:
        flask.g.org_uuid_list = self._authorization.get_user_scope(
            token, flask.request, flask.g.is_admin)
    except (_InvalidOrgId, _CloudControllerConnectionError,
            _UserCantAccessOrg):
        # Org missing/invalid or user lacks access -> 403 Forbidden.
        self._log.exception('Failed to authenticate the user.')
        abort(403)
def head(self, record_id, tag_name):
    """Reject HEAD: not supported for this tag resource.

    :param record_id: the identifier of the record
    :param tag_name: the name of the tag
    """
    # 405 Method Not Allowed
    abort(405)
def put(self, slug, identifier):
    """Update the task identified by 'identifier' from the challenge
    identified by 'slug'.

    Appends the submitted action to the task and merges it into the
    session. Returns ({}, 200) on success; 409 on a database conflict,
    500 on any other commit failure.
    """
    # initialize the parser
    parser = reqparse.RequestParser()
    parser.add_argument('action', type=str, help='action cannot be parsed')
    parser.add_argument('editor', type=str, help="editor cannot be parsed")
    args = parser.parse_args()
    # get the task
    task = get_task_or_404(slug, identifier)
    # append the latest action to it.
    task.append_action(Action(args.action, session.get('osm_id'), args.editor))
    merged_t = db.session.merge(task)
    db.session.add(merged_t)
    try:
        db.session.commit()
    except IntegrityError as e:
        # BUG FIX: the format string uses {identifier}/{message} but the
        # original passed id=..., so building the 409 message raised
        # KeyError instead. Also catch IntegrityError directly rather
        # than testing type(e) == IntegrityError inside a broad handler.
        app.logger.warn(e)
        db.session.rollback()
        abort(409, message='The session and the database did not agree for task identifier {identifier}: {message}'.format(identifier=task.identifier, message=e))
    except Exception as e:
        app.logger.warn(e)
        abort(500, message=message_internal_server_error)
    return {}, 200
def patch(self, record_id, tag_name):
    """Reject PATCH: not supported for this tag resource.

    :param record_id: the identifier of the record
    :param tag_name: the name of the tag
    """
    # 405 Method Not Allowed
    abort(405)
def post(self):
    """Create a site document from the submitted form and return it as JSON.

    Requires a logged-in session ('username' key); otherwise aborts 403.
    """
    if 'username' in session:
        # BUG FIX: removed `print request.form` — a Python 2 print
        # statement that is a SyntaxError under Python 3 (and debug
        # output in any case). Also dropped the commented-out dead code.
        site = dict.fromkeys(['name', 'description', 'overview'])
        site['name'] = request.form['name']
        site['description'] = request.form['description']
        site['overview'] = request.form['overview']
        site['location'] = dict.fromkeys(['x', 'y'])
        site['location']['x'] = request.form['x']
        site['location']['y'] = request.form['y']
        sites = mongo.db.sites
        site_id = sites.insert_one(site).inserted_id
        new_site = sites.find_one({"_id": site_id})
        # ObjectId is not JSON-serializable, so stringify it first.
        new_site["_id"] = str(new_site["_id"])
        record = {"Result": "OK", "Record": new_site}
        return jsonify(record)
    abort(403, message="Request Failed")
def get(self, item_id):
    """Return the item with the given id, aborting with 404 when absent."""
    found = Item.query.get(item_id)
    if found is None:
        abort(404)
    return found
def get_model_or_404(manager, pk):
    """Look up *pk* through *manager*, aborting with 404 when it is missing."""
    try:
        return manager.find_one(pk)
    except NotFoundError:
        abort(404, message="Entity not found")
def put(self, name=None):
    """Update a group's full name and, optionally, its image.

    Aborts 404 when no name is supplied or no matching group exists.
    Returns a dict with the group's name, full name and image grid id.
    """
    if not name:
        abort(404, message="A group name is required.")
    args = parser.parse_args()
    group = Group.objects(name__iexact=name).first()
    if not group:
        abort(404, message="A group with that name does not exist.")
    group.full_name = args['full_name']
    uploaded = args['image']
    if uploaded and validate_file(uploaded.filename):
        # Replace an existing image in-place; otherwise store a new one.
        if group.image:
            group.image.replace(uploaded, content_type=uploaded.content_type)
        else:
            group.image.put(uploaded, content_type=uploaded.content_type)
    response = {
        'name': group.name,
        'full_name': group.full_name,
        'image': str(group.image.grid_id) if group.image.grid_id else None,
    }
    group.save()
    return response
def get(self, user, name):
    """Fetch assignment *name*, enforcing view permission for *user*."""
    assignment = self.model.by_name(name)
    if not assignment:
        restful.abort(404)
    elif not self.model.can(assignment, user, 'view'):
        restful.abort(403)
    return assignment
def wrapper(*args, **kwargs):
    """Require an authenticated caller unless the view is marked public."""
    is_public = getattr(func, 'public', False)
    if not is_public and not current_user.is_authenticated:
        restful.abort(401)
    # The login manager takes care of converting a token to a user.
    kwargs['user'] = current_user
    return func(*args, **kwargs)
def get(self, user, name):
    """Export every course submission backup for assignment *name*.

    Admins get a 404 when the assignment is missing; everyone else gets
    a 403 so the assignment's existence is not revealed.
    """
    assign = models.Assignment.by_name(name)
    if not assign:
        return restful.abort(404 if user.is_admin else 403)
    if not self.model.can(assign, user, 'export'):
        return restful.abort(403)
    students, subms, no_subms = assign.course_submissions()
    backups = [models.Backup.query.get(s_id) for s_id in sorted(subms)]
    # Attach the owning group members to each backup.
    for backup in backups:
        backup.group = [models.User.get_by_id(uid) for uid in backup.owners()]
    return {'backups': backups, 'count': len(backups)}
def get(self, user, email):
    """Return course participations for the user behind *email*."""
    target = models.User.lookup(email)
    if not self.can('view', target, user):
        restful.abort(403)
    if not target:
        return {'courses': []}
    # NOTE(review): this returns user.participations (the requester's),
    # not target.participations — confirm that is intentional.
    return {'courses': user.participations}
def get(self, resource_id, file_id):
    """Get a deposition file."""
    deposition = Deposition.get(resource_id, user=current_user)
    deposition_file = deposition.get_file(file_id)
    if deposition_file is None:
        abort(404, message="File does not exist", status=404)
    return deposition.type.marshal_file(deposition_file)
def get(self, user, name, email):
    """Page through *email*'s backups for assignment *name*.

    Supports 'limit' and 'offset' query args; response includes a
    has_more flag for pagination.
    """
    assign = models.Assignment.by_name(name)
    target = models.User.lookup(email)
    limit = request.args.get('limit', 150, type=int)
    offset = request.args.get('offset', 0, type=int)
    if not assign or not target:
        # Admins see 404; others see 403 to avoid leaking existence.
        return restful.abort(404 if user.is_admin else 403)
    if not self.model.can(assign, user, 'export'):
        return restful.abort(403)
    base_query = (models.Backup.query
                  .filter(models.Backup.submitter_id == target.id,
                          models.Backup.assignment_id == assign.id)
                  .order_by(models.Backup.created.desc()))
    page = base_query.limit(limit).offset(offset)
    total = base_query.count()
    return {
        'backups': page.all(),
        'count': total,
        'limit': limit,
        'offset': offset,
        'has_more': ((total - offset) - limit) > 0,
    }
def put(self, id):
    """Update a question's description and subject.

    Aborts 409 when required fields are missing or when another question
    with the same description/subject/examination already exists.
    Returns the updated question.
    """
    args = request.get_json(force=True)
    question = QuestionModel.query.filter_by(id=id).first_or_404()
    # Make sure required fields are there
    if args['descricao'] and args['id_area_conhecimento'] is not None:
        # Make sure the fields are unique
        if QuestionModel.query.filter(QuestionModel.id != args['id']).\
                filter((QuestionModel.descricao == args['descricao']) &
                       (QuestionModel.id_area_conhecimento ==
                        args['id_area_conhecimento']) &
                       (QuestionModel.id_concurso == args['id_concurso'])
                       ).first():
            abort(409, message="A question with this description and subject" +
                               " already exists for this examination")
        else:
            question.descricao = args['descricao']
            question.id_area_conhecimento = args['id_area_conhecimento']
    else:
        abort(409, message="Missing fields")
    # Commit and return
    db.session.commit()
    # BUG FIX: removed the dead `question.id` expression statement — it
    # had no effect.
    return question
def process_input(self, deposition, draft_id=None):
    """Process input data."""
    # Nothing to do without a JSON body.
    if not request.json:
        return
    if draft_id is None:
        # Defaults to `_default' draft id unless specified
        draft = deposition.get_or_create_draft(
            request.json.get('draft_id', deposition.get_default_draft_id())
        )
    else:
        draft = deposition.get_draft(draft_id)
    # Process data
    dummy_form, validated, result = draft.process(
        request.json.get('metadata', {}), complete_form=True
    )
    # Validation failed to abort
    if not validated:
        abort(
            400,
            message="Bad request",
            status=400,
            errors=filter_draft_errors(result),
        )
    if validated and request.json.get('completed', False):
        draft.complete()
def get(self, consumer_id):
    """Return metadata for the consumer registered at *consumer_id*.

    Aborts 404 when the id is unknown or not a valid integer.
    """
    try:
        _, consumer = self.twitter_service.router.routers[int(consumer_id)]
        return TwitterConsumerMeta(consumer_id, consumer.ident,
                                   consumer.is_alive())
    except (KeyError, ValueError):
        # BUG FIX: also catch ValueError so a non-numeric consumer_id
        # yields a clean 404 instead of an unhandled exception (HTTP 500).
        abort(404, message="Consumer {} does not exist".format(consumer_id))
def get(self, cluster_id):
    """List the hosts belonging to *cluster_id*, aborting 404 if unknown."""
    try:
        return db_api.get_hosts_by_cluster(cluster_id)
    except exceptions.NotFound as exc:
        abort(404, message=six.text_type(exc))
def post(self):
    """Create an Invoice for a contact and mirror it as a PayPal invoice.

    Returns (invoice, 201) on success; aborts 404 when the contact does
    not exist or when PayPal invoice creation fails.
    """
    args = parse.parse_args()
    c_id = args['contact_id']
    # BUG FIX: Query.get() returns an instance (or None), not a query, so
    # the original `Contact.query.get(c_id).first()` always raised
    # AttributeError and every request fell into the bare `except` (which
    # could also reference c_id before assignment). Check for None instead.
    dcontact = Contact.query.get(c_id)
    if dcontact is None:
        abort(404, message="Contact id: %s doesn't exist" % (c_id))
    inv = Invoice()
    inv.email = args['email']
    inv.invoice_date = args['invoice_date']
    inv.recipient_note = args['recipient_note']
    inv.subtotal = args['subtotal']
    inv.total = args['total']
    inv.paid = False
    inv.contact_id = c_id
    import paypalrestsdk
    paypalrestsdk.configure({
        "mode": "sandbox",  # PAYPAL_MODE
        "client_id": app.config['PAYPAL_CLIENT_ID'],  # PAYPAL_CLIENT_ID
        "client_secret": app.config['PAYPAL_CLIENT_SECRET']
    })  # PAYPAL_CLIENT_SECRET
    paypal_invoice = paypalrestsdk.Invoice({
        "merchant_info": {
            "email": "*****@*****.**",
            "first_name": "Francisco",
            "last_name": "Barcena",
            "business_name": "fdev.tk",
            "phone": {"country_code": "001", "national_number": "5555555555"},
            "address": {
                "line1": "123 Fake St. Apt.A",
                "city": "Fake City",
                "country_code": "US",
                "state": "California"
            },
        },
        "billing_info": [{"email": request.form["email"]}],
        "note": "MAKE MONEY F*CK B*TCHES"
    })
    # inv_lines (from list in args)
    # NOTE(review): `inv_lines` is not defined in this method — it must
    # come from the enclosing scope or is missing; TODO confirm.
    the_items = []
    for the_item in inv_lines:
        # append to the_items (for paypal)
        the_items.append(dict({
            "name": the_item.d_description.data,
            "quantity": str(the_item.qty.data),
            "unit_price": {"currency": "USD",
                           "value": str(the_item.unit_price.data)}}))
        # Create and append to Invoice model
        # NOTE(review): these four attribute reads are no-ops — presumably
        # they were meant to be assignments from the_item; confirm intent.
        new_invoice_line = InvoiceLine()
        new_invoice_line.description
        new_invoice_line.quantity
        new_invoice_line.unit_price
        new_invoice_line.amount
        inv.invoice_lines.append(new_invoice_line)
    paypal_invoice.items = the_items
    error = None
    if paypal_invoice.create():
        print('paypal invoice created')
        # Add invoice lines here (from list as argument)
        db.session.add(inv)
        db.session.commit()
    else:
        error = paypal_invoice.error
        abort(404, message="Invoice creation error: %s" % (error))
    return inv, 201
def func_wrapper(*args, **kwargs):
    """Resolve kwargs['publication_id'] to a Publication, stash it on g."""
    pub = Publication.query.get(kwargs['publication_id'])
    if pub is None:
        abort(404, message="Could not find the selected publication")
    g.publication = pub
    return f(*args, **kwargs)
def put(self, resource_id, file_id):
    """Update a deposition file - i.e. rename it."""
    validator = APIValidator()
    if not validator.validate(request.json, file_schema):
        abort(
            400,
            message="Bad request",
            status=400,
            errors=map(lambda x: dict(message=x,
                                      code=error_codes["validation_error"]),
                       validator.errors),
        )
    deposition = Deposition.get(resource_id, user=current_user)
    deposition_file = deposition.get_file(file_id)
    if not deposition.type.authorize_file(deposition, deposition_file,
                                          "update_metadata"):
        raise ForbiddenAction("update_metadata", deposition_file)
    # Reject any name that secure_filename would have to alter.
    new_name = secure_filename(request.json["filename"])
    if new_name != request.json["filename"]:
        abort(
            400,
            message="Bad request",
            status=400,
            errors=[dict(message="Not a valid filename",
                         code=error_codes["validation_error"])],
        )
    deposition_file.name = new_name
    deposition.save()
    return deposition.type.marshal_file(deposition_file)
def embed(query_id, visualization_id, org_slug=None):
    """Render the embeddable page for one visualization of a query.

    Aborts 400 when the query has no cached result yet and 404 when the
    visualization does not exist. Access is checked against the current
    user's groups.
    """
    # TODO: add event for embed access
    query = models.Query.get_by_id_and_org(query_id, current_org)
    require_access(query.groups, current_user, view_only)
    vis = query.visualizations.where(
        models.Visualization.id == visualization_id).first()
    qr = {}
    if vis is not None:
        vis = vis.to_dict()
        qr = query.latest_query_data
        if qr is None:
            abort(400, message="No Results for this query")
        else:
            qr = qr.to_dict()
    else:
        abort(404, message="Visualization not found.")
    client_config = {}
    client_config.update(settings.COMMON_CLIENT_CONFIG)
    # Strip both payloads down to the fields the template needs.
    qr = project(qr, ('data', 'id', 'retrieved_at'))
    vis = project(vis, ('description', 'name', 'id', 'options', 'query',
                        'type', 'updated_at'))
    # NOTE(review): this projects `vis` again rather than `vis['query']`,
    # and lists 'name' twice in the field tuple — looks like a bug;
    # confirm against the upstream implementation before changing.
    vis['query'] = project(vis, ('created_at', 'description', 'name', 'id',
                                 'latest_query_data_id', 'name',
                                 'updated_at'))
    return render_template("embed.html",
                           name=settings.NAME,
                           base_href=base_href(),
                           client_config=json_dumps(client_config),
                           visualization=json_dumps(vis),
                           query_result=json_dumps(qr),
                           analytics=settings.ANALYTICS)
def post(self):  # pylint:disable=no-self-use
    """Create an SNS topic after validating its name; admins only."""
    admin_required()
    name = name_parser.parse_args()["name"]
    if not TOPIC_NAME_RE.match(name):
        abort(400, error_message="Invalid topic name. Topic name can only contain A-Z, a-z, 0-9, - and _")
    created = run_sns_command(sns.create_topic, Name=name)
    return {"topic_id": created["TopicArn"]}
def put(self, resource_id):
    """Sort files in collection."""
    # Body must be a JSON list of {'id': ...} objects.
    if not isinstance(request.json, list):
        abort(
            400,
            message="Bad request",
            status=400,
            errors=[dict(message="Expected a list",
                         code=error_codes["validation_error"])],
        )
    validator = APIValidator()
    for file_item in request.json:
        if not validator.validate(file_item, file_schema_list):
            abort(
                400,
                message="Bad request",
                status=400,
                errors=map(lambda x: dict(message=x,
                                          code=error_codes["validation_error"]),
                           validator.errors),
            )
    deposition = Deposition.get(resource_id, user=current_user)
    # Every referenced file must exist before we reorder anything.
    for file_item in request.json:
        if not deposition.get_file(file_item["id"]):
            raise FileDoesNotExists(file_item["id"])
    # Sort files raise ForbiddenAction if not authorized
    deposition.sort_files(map(lambda x: x["id"], request.json))
    deposition.save()
    return map(lambda f: deposition.type.marshal_file(f), deposition.files)
def parse_args(self, req=None, strict=False):
    """Parse all arguments from the provided request and return the results
    as a Namespace

    :param req: the request to parse; defaults to the current flask request
    :param strict: if req includes args not in parser, throw 400 BadRequest exception
    """
    if req is None:
        req = request
    namespace = self.namespace_class()
    # A record of arguments not yet parsed; as each is found
    # among self.args, it will be popped out
    req.unparsed_arguments = dict(self.argument_class('').source(req)) if strict else {}
    errors = {}
    for arg in self.args:
        value, found = arg.parse(req, self.bundle_errors)
        # When errors are bundled, arg.parse signals failure by returning
        # the ValueError as the value and the error dict as `found`.
        if isinstance(value, ValueError):
            errors.update(found)
            found = None
        # store_missing controls whether absent args get their default.
        if found or arg.store_missing:
            namespace[arg.dest or arg.name] = value
    if errors:
        flask_restful.abort(400, message=errors)
    # In strict mode, anything left unconsumed is an unknown argument.
    if strict and req.unparsed_arguments:
        raise exceptions.BadRequest('Unknown arguments: %s' % ', '.join(req.unparsed_arguments.keys()))
    return namespace
def run_query(query, parameters, data_source, query_id, max_age=0):
    """Apply *parameters* to *query* and return either a cached result
    (when fresh within *max_age*) or a newly enqueued execution job.
    """
    # A paused data source never executes anything.
    if data_source.paused:
        if data_source.pause_reason:
            message = '{} is paused ({}). Please try later.'.format(data_source.name, data_source.pause_reason)
        else:
            message = '{} is paused. Please try later.'.format(data_source.name)
        return error_response(message)
    try:
        query.apply(parameters)
    except InvalidParameterError as e:
        abort(400, message=e.message)
    if query.missing_params:
        return error_response(u'Missing parameter value for: {}'.format(u", ".join(query.missing_params)))
    # max_age == 0 means "always execute fresh" — skip the cache lookup.
    cached = None
    if max_age != 0:
        cached = models.QueryResult.get_latest(data_source, query.text, max_age)
    if cached:
        return {'query_result': cached.to_dict()}
    job = enqueue_query(
        query.text, data_source, current_user.id, current_user.is_api_user(),
        metadata={
            "Username": repr(current_user) if current_user.is_api_user() else current_user.email,
            "Query ID": query_id
        })
    return {'job': job.to_dict()}
def put(self, id):
    """Update an examination's fields.

    Aborts 409 when required fields are missing or when another
    examination with the same name/year/semester already exists.
    Returns the updated examination.
    """
    args = request.get_json(force=True)
    examination = ExaminationModel.query.filter_by(id=id).first_or_404()
    # Make sure required fields are there
    if args['nome'] and args['ano'] and args['semestre'] is not None:
        # Make sure the fields are unique
        if ExaminationModel.query.filter(
                ExaminationModel.id != args['id']).\
                filter((ExaminationModel.nome == args['nome']) &
                       (ExaminationModel.ano == args['ano']) &
                       (ExaminationModel.semestre == args['semestre'])
                       ).first():
            abort(409, message="An examination with this name already exists")
        else:
            examination.id_instituicao_ensino = \
                args['id_instituicao_ensino']
            examination.nome = args['nome']
            examination.ano = args['ano']
            examination.semestre = args['semestre']
            examination.data_inicio = args['data_inicio']
            examination.duracao = args['duracao']
    else:
        abort(409, message="Missing fields")
    # Commit and return
    db.session.commit()
    # BUG FIX: removed the dead `examination.id` expression statement —
    # it had no effect.
    return examination
def head(self):
    """Reject HEAD requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def put(self, resource_id):
    """Reject PUT requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def patch(self, resource_id):
    """Reject PATCH requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def options(self, resource_id):
    """Reject OPTIONS requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def head(self, resource_id):
    """Reject HEAD requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def patch(self):
    """Reject PATCH requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def options(self):
    """Reject OPTIONS requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def put(self):
    """Reject PUT requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def get(self):
    """Return every node, mapping any failure to a 400 response."""
    try:
        return Node.get_all()
    except Exception as exc:
        abort(400, message="Error getting all nodes -> {0}".format(exc))
def delete(self):
    """Reject DELETE requests on this resource."""
    # 405 Method Not Allowed
    abort(405)
def get(self, mock_id):
    """Return the mock identified by *mock_id*, or abort with 404."""
    mock = Mock.get(id=mock_id)
    # BUG FIX: removed the stray debug print(f"Mock is :{mock}") — it
    # polluted stdout on every request.
    if mock is None:
        abort(404)
    return mock, 200
def abort_if_job_not_found(job_id):
    """Abort with 404 unless a Jobs row with *job_id* exists."""
    # NOTE(review): the session is never closed here — presumably
    # create_session manages its own lifetime; confirm.
    session = db_session.create_session()
    found = session.query(Jobs).get(job_id)
    if not found:
        abort(404, message=f"Job {job_id} not found")
def get(self, driver):
    """Inspect the host system behind *driver*; 400 on a bad parameter."""
    try:
        backend = to_driver(driver)
        return output_json(backend.inspect_host_system(), 200)
    except ValueError:
        abort(400, message="bad parameter")
def require_fields(req, fields):
    """Abort with 400 when *req* is missing any name in *fields*."""
    missing = [field for field in fields if field not in req]
    if missing:
        abort(400)
def post(self):
    """Record a submitted cause-of-death JSON payload; 400 if empty."""
    payload = request.json
    if not payload:
        abort(400)
    causes_of_death.append(payload)
    # NOTE(review): returns a bare status code rather than (body, 201) —
    # confirm this is the intended Flask-RESTful response shape.
    return 201
def post(self, **kwargs):
    """Create a Mock from keyword args; abort 500 when creation fails."""
    created = Mock.create(**kwargs)
    if created is None:
        abort(500)
    return created, 201
def abort_if_news_not_found(news_id):
    """Abort with 404 unless a News row with *news_id* exists."""
    session = db_session.create_session()
    record = session.query(News).get(news_id)
    if not record:
        abort(404, message=f"News {news_id} not found")
def post(self, driver, uuid_or_name):
    """Resume the named node and return 204; 400 on a bad parameter."""
    try:
        backend = to_driver(driver)
        return output_json(backend.resume_node(uuid_or_name), 204)
    except ValueError:
        abort(400, message="bad parameter")
def abort_if_usuario_doesnt_exist(usuario_id):
    """Abort with 404 when *usuario_id* is not a known user."""
    # Membership test against the module-level USUARIOS mapping.
    if usuario_id not in USUARIOS:
        abort(404, message="Usuario {} no existe".format(usuario_id))
def abort_if_news_not_found(news_id, data_base):
    """Abort with 404 when *news_id* has no entry in *data_base*."""
    entry = data_base.get(news_id)
    if not entry:
        abort(404, message="News {} not found".format(news_id))
def get(self, id):
    """Return a user's name and roles as JSON; abort 400 when unknown."""
    account = User.query.get(id)
    if not account:
        abort(400)
    # NOTE(review): the role list is hard-coded to ['admin'] for every
    # user — confirm this is intentional.
    return jsonify(username=account.username, role=['admin'])
def get(self):
    """List every meal for an authenticated user; abort 403 otherwise."""
    current_user = login()
    # BUG FIX: removed the stray debug print of the current user — it
    # wrote user details to stdout on every request.
    if not current_user:
        abort(403)
    return Meal.query.all()
def delete(self, org_id, location_id, role_id, user_id):
    """Archive a user's membership in a role and clean up what depended
    on it: unassign future shifts (and invalidate their schedule cache),
    deny open future time-off requests, unassign recurring shifts, and
    close open timeclocks. Emails the removed user.

    Returns ({}, 204) on success; aborts 404 when the membership does
    not exist, 400 when it is already archived, 500 when the initial
    commit fails.
    """
    user = User.query.get_or_404(user_id)
    role = Role.query.get_or_404(role_id)
    assoc = RoleToUser.query.filter_by(user_id=user.id,
                                       role_id=role.id).first()
    if assoc is None:
        abort(404)
    if assoc.archived:
        abort(400)
    assoc.archived = True
    try:
        db.session.commit()
    except:
        # NOTE(review): bare except swallows the real failure; consider
        # logging and rolling back before aborting.
        abort(500)
    location = Location.query.get(location_id)
    organization = Organization.query.get(org_id)

    # Set future shifts to unassigned
    # Be careful to not unassign them from other orgs!
    future_shifts = Shift2.query.filter(
        Shift2.user_id == user.id,
        Shift2.role_id == role_id,
        Shift2.start > datetime.datetime.utcnow(),
    ).all()

    for shift in future_shifts:
        shift.user_id = None

        # clear cache too
        schedule = Schedule2.query \
            .filter(
                Schedule2.role_id == role_id,
                Schedule2.start <= shift.start,
                Schedule2.stop > shift.start,
            ).first()

        if schedule is not None:
            Shifts2Cache.delete(schedule.id)

    # deny future time off requests that are open
    future_time_off_requests = TimeOffRequest.query \
        .filter_by(role_to_user_id=assoc.id) \
        .filter_by(state=None) \
        .filter(
            TimeOffRequest.start > datetime.datetime.utcnow(),
        ) \
        .all()

    for time_off_request in future_time_off_requests:
        time_off_request.state = "denied"

    # unassign all recurring shifts
    recurring_shifts = RecurringShift.query \
        .filter_by(
            role_id=role_id,
            user_id=user_id
        ) \
        .all()

    for recurring_shift in recurring_shifts:
        current_app.logger.info(
            "Setting recurring shift %s to unassigned because user %s is being removed from role %s"
            % (recurring_shift.id, user_id, role_id))
        recurring_shift.user_id = None

    # close open timeclocks
    timeclocks = Timeclock.query \
        .filter_by(
            role_id=role_id,
            user_id=user_id,
            stop=None
        ) \
        .all()

    for timeclock in timeclocks:
        original_start = timeclock.start
        original_stop = timeclock.stop
        timeclock.stop = datetime.datetime.utcnow()
        current_app.logger.info(
            "Closing timeclock %s because user %s is being removed from role %s"
            % (timeclock.id, user_id, role_id))
        # Notify managers about each forcibly-closed timeclock.
        alert_timeclock_change(timeclock, org_id, location_id, role_id,
                               original_start, original_stop, user,
                               g.current_user)

    alert_email(
        user,
        "You have been removed from a team at %s" % organization.name,
        "You have been removed from the team <b>%s</b> at the <b>%s</b> location of <b>%s</b>. This may happen as the scheduling manager changes your role or location."
        % (role.name, location.name, organization.name),
        force_send=True)

    g.current_user.track_event("deleted_role_member")
    return {}, 204
def get_helper(model_instance):
    """Return *model_instance* as-is, aborting with 404 when it is falsy."""
    if model_instance:
        return model_instance
    abort(404, message="User does not exist.")
def abort_if_apartment_doesnt_exist(apt_id):
    """Abort with 404 when *apt_id* is not a known apartment."""
    # Membership test against the module-level buildings mapping.
    if apt_id not in buildings:
        abort(404, message="Apartment {} doesn't exist".format(apt_id))
def get(self, from_id, to_id):
    """Return the connection between two ids, aborting 404 when absent."""
    connection = logic.get_connection(from_id, to_id)
    if not connection['exists']:
        abort(404, error="Connection does not exist")
    return connection
def check_required_parameters(required: set, incoming: set):
    """Abort with 400 listing any required parameters absent from *incoming*."""
    if required - incoming:
        abort(400, message=f"Missing parameters: {list(required - incoming)}")
def patch(self, org_id, location_id, role_id, user_id):
    """Modify a role membership: workweek hour limits, internal id,
    archived flag, working hours, or re-send the activation reminder.

    Returns the dict of applied changes on success, or an
    (error-body, status) tuple / abort on validation failure.

    NOTE(review): this code calls dict.iteritems(), i.e. Python 2 —
    switch to .items() when porting to Python 3.
    """
    parser = reqparse.RequestParser()
    parser.add_argument("min_hours_per_workweek", type=int)
    parser.add_argument("max_hours_per_workweek", type=int)
    parser.add_argument("internal_id", type=str)
    parser.add_argument("archived", type=inputs.boolean)
    parser.add_argument("activateReminder", type=inputs.boolean)
    parser.add_argument("working_hours", type=str)

    # Filter out null values
    changes = parser.parse_args(strict=True)
    changes = dict((k, v) for k, v in changes.iteritems() if v is not None)

    rtu = RoleToUser.query.filter_by(user_id=user_id,
                                     role_id=role_id).first()
    if rtu is None:
        abort(404)

    if "archived" in changes:
        # Only sudo users may toggle archived, and not under an archived role.
        if not g.current_user.is_sudo():
            return {
                "message": "You do not have permission to modify 'archived'."
            }, 401

        role = Role.query.get_or_404(role_id)
        if role.archived:
            return {"message": "The parent role is archived."}, 400
    elif rtu.archived:
        # All other modifications are forbidden on an archived membership.
        abort(400)

    # activation email reminder - it can't be committed to this RTU model though
    if "activateReminder" in changes:
        user = User.query.get_or_404(user_id)
        org = Organization.query.get_or_404(org_id)

        if user.active:
            return {"message": "This user is already active"}, 400

        user.send_activation_reminder(user, org.name)
        del changes["activateReminder"]

    # extract workweek limits and convert into half hour
    if "min_hours_per_workweek" in changes:
        min_half_hours_per_workweek = changes["min_hours_per_workweek"] * 2
    else:
        min_half_hours_per_workweek = rtu.min_half_hours_per_workweek

    if "max_hours_per_workweek" in changes:
        max_half_hours_per_workweek = changes["max_hours_per_workweek"] * 2
    else:
        max_half_hours_per_workweek = rtu.max_half_hours_per_workweek

    # some verification (336 half-hours == 168 hours == one full week)
    if min_half_hours_per_workweek > max_half_hours_per_workweek:
        return {
            "message":
            "min_hours_per_workweek cannot be greater than max_hours_per_workweek"
        }, 400

    if not (0 <= min_half_hours_per_workweek <= 336):
        return {
            "message": "min_hours_per_workweek cannot be less than 0"
        }, 400

    if not (0 <= max_half_hours_per_workweek <= 336):
        return {
            "message": "max_hours_per_workweek cannot be greater than 168"
        }, 400

    # the proper db term must be submitted if it is intended to be changed
    if "min_hours_per_workweek" in changes:
        del changes["min_hours_per_workweek"]
        changes[
            "min_half_hours_per_workweek"] = min_half_hours_per_workweek

    if "max_hours_per_workweek" in changes:
        del changes["max_hours_per_workweek"]
        changes[
            "max_half_hours_per_workweek"] = max_half_hours_per_workweek

    if changes.get("working_hours") is not None:
        try:
            working_hours = json.loads(changes.get("working_hours"))
        except:
            return {
                "message": "Unable to parse working hours json body"
            }, 400

        if working_hours is None:
            return {
                "message": "Unable to parse working hours json body"
            }, 400

        if not verify_days_of_week_struct(working_hours, True):
            return {
                "message": "working hours is improperly formatted"
            }, 400

        g.current_user.track_event("modified_working_hours")

    # Apply each surviving change attribute-by-attribute, committing as we go.
    for change, value in changes.iteritems():
        if value is not None:
            try:
                setattr(rtu, change, value)
                db.session.add(rtu)
                db.session.commit()
            except Exception as exception:
                db.session.rollback()
                current_app.logger.exception(str(exception))
                abort(400)

    g.current_user.track_event("modified_role_member")
    return changes
def get(self):
    """Return every sensor reading; abort 404 when there are none."""
    rows = SensorModel.query.all()
    if not rows:
        # NOTE(review): the message mentions a specific id, but this
        # handler lists all rows — confirm the wording is intended.
        abort(404, message="could not find sensor with that id")
    return rows
def abort_if_superclass_doesnt_exist(super_id):
    """Abort with 404 unless a Superclass row with *super_id* exists."""
    record = Superclass.query.get(super_id)
    if record is None:
        abort(404, message="Superclass {} doesn't exist".format(super_id))