def get_profile_from_id(id, id_type="url_slug", show_secrets=False,
                        include_products=True, include_product_relationships=True):
    if include_products:
        if include_product_relationships:
            query_base = Profile.query
        else:
            query_base = db.session.query(Profile).options(
                orm.noload('*'), orm.subqueryload(Profile.products))
    else:
        query_base = db.session.query(Profile).options(orm.noload('*'))

    if id_type == "id":
        try:
            profile = query_base.get(id)
        except DataError:  # id has to be an int
            logger.debug(u"get_profile_from_id no profile found from profile id {id}".format(id=id))
            profile = None
    elif id_type == "email":
        profile = query_base.filter(func.lower(Profile.email) == func.lower(id)).first()
    elif id_type == "url_slug":
        profile = query_base.filter(func.lower(Profile.url_slug) == func.lower(id)).first()

    if not show_secrets:
        profile = hide_profile_secrets(profile)

    return profile

def test_noload(self):
    self.assertEqual(
        str(self.db.query(Foo).noload('bars')),
        str(self.db.query(Foo).options(orm.noload('bars')))
    )
    self.assertEqual(
        str(self.db.query(Foo).noload('bars', 'bazs')),
        str(self.db.query(Foo).options(orm.noload('bars').noload('bazs')))
    )
    self.assertEqual(
        str(self.db.query(Foo).noload(Foo.bars)),
        str(self.db.query(Foo).options(orm.noload(Foo.bars)))
    )
    self.assertEqual(
        str(self.db.query(Foo).noload(Foo.bars, Bar.bazs)),
        str(self.db.query(Foo)
            .options(orm.noload(Foo.bars).noload(Bar.bazs)))
    )
    self.assertEqual(
        str(self.db.query(Foo)
            .noload('bars', options=[LoadOption('noload', 'bazs')])),
        str(self.db.query(Foo).options(orm.noload('bars').noload('bazs')))
    )

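# The test above exercises a chainable ``noload`` shortcut on a custom query
# class. Below is a minimal sketch of such a helper, assuming SQLAlchemy's
# Query is subclassable here and that LoadOption behaves like a
# ('noload', attr) tuple; these names are illustrative, not the tested
# project's actual implementation.
from sqlalchemy import orm
from sqlalchemy.orm import Query


class NoloadQuery(Query):
    def noload(self, *attrs, **kwargs):
        """Chain orm.noload() along the given attribute path."""
        opt = orm.noload(attrs[0])
        for attr in attrs[1:]:
            opt = opt.noload(attr)  # extend the path, e.g. bars -> bazs
        for strategy, attr in kwargs.get('options', ()):
            opt = getattr(opt, strategy)(attr)  # e.g. ('noload', 'bazs')
        return self.options(opt)
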
def replace_email_with_user(cls, user, relationship_attr):
    """
    Replaces all email-based entries matching the user's email addresses
    with user-based entries.
    If the user is already in the ACL, the two entries are merged.

    :param user: A User object.
    :param relationship_attr: The name of the relationship pointing to the
                              object associated with the ACL entry.
    :return: The set of objects where the user has been added to the ACL.
    """
    assert cls.allow_emails
    updated = set()
    query = (cls
             .find(cls.email.in_(user.all_emails))
             .options(noload('user'), noload('local_group'),
                      joinedload('event_new').load_only('id')))
    for entry in query:
        parent = getattr(entry, relationship_attr)
        existing = (cls.query
                    .with_parent(parent, 'acl_entries')
                    .options(noload('user'), noload('local_group'))
                    .filter_by(principal=user)
                    .first())
        if existing is None:
            entry.principal = user
        else:
            existing.merge_privs(entry)
            parent.acl_entries.remove(entry)
        updated.add(parent)
    db.session.flush()
    return updated

def get_application_by_id_admin(application_id):
    # this function is copied from get_application_by_id and should replace it
    # at a later point
    application = (
        Application
        .query
        .filter(Application.id == application_id)
        .options(
            joinedload("supplier"),
            joinedload("supplier.domains"),
            noload("supplier.domains.assessments")
        )
        .first_or_404()
    )

    if application.status == 'deleted':
        abort(404)

    # Maximum prices are used on the pricing page to encourage value for money
    result = (
        Domain
        .query
        .options(noload('suppliers'))
        .all()
    )
    domains = {'prices': {'maximum': {}}}
    domains['prices']['maximum'] = {domain.name: domain.price_maximum for domain in result}

    return jsonify(application=application.serializable, domains=domains)

def get_events_managed_by(user, dt=None):
    """Gets the IDs of events where the user has management privs.

    :param user: A `User`
    :param dt: Only include events taking place on/after that date
    :return: A set of event ids
    """
    query = (user.in_event_acls
             .join(Event)
             .options(noload('user'), noload('local_group'), load_only('event_id'))
             .filter(~Event.is_deleted, Event.ends_after(dt))
             .filter(EventPrincipal.has_management_permission('ANY')))
    return {principal.event_id for principal in query}

def get_events_managed_by(user, from_dt=None, to_dt=None):
    """Gets the IDs of events where the user has management privs.

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    query = (user.in_event_acls
             .join(Event)
             .options(noload('user'), noload('local_group'), load_only('event_id'))
             .filter(~Event.is_deleted, Event.starts_between(from_dt, to_dt))
             .filter(EventPrincipal.has_management_role('ANY')))
    return {principal.event_id for principal in query}

def update_application(application_id):
    application_json = get_application_json()
    application = Application.query.options(
        noload('supplier.*')
    ).get(application_id)
    if application is None:
        abort(404, "Application '{}' does not exist".format(application_id))

    if application.status == 'submitted' and application_json.get('status') == 'saved':
        db.session.add(AuditEvent(
            audit_type=AuditTypes.revert_application,
            user='',
            data={},
            db_object=application
        ))
        publish_tasks.application.delay(
            publish_tasks.compress_application(application),
            'reverted'
        )

    application.update_from_json(application_json)
    save_application(application)

    errors = ApplicationValidator(application).validate_all()

    return (
        jsonify(
            application=application.serializable,
            application_errors=errors),
        200)

def get_events_with_linked_sessions(user, dt=None):
    """Returns a dict with keys representing event_id and the values
    containing data about the user rights for sessions within the event

    :param user: A `User`
    :param dt: Only include events taking place on/after that date
    """
    query = (user.in_session_acls
             .options(load_only('session_id', 'roles', 'full_access', 'read_access'))
             .options(noload('*'))
             .options(contains_eager(SessionPrincipal.session).load_only('event_id'))
             .join(Session)
             .join(Event, Event.id == Session.event_id)
             .filter(~Session.is_deleted, ~Event.is_deleted, Event.ends_after(dt)))
    data = defaultdict(set)
    for principal in query:
        roles = data[principal.session.event_id]
        if 'coordinate' in principal.roles:
            roles.add('session_coordinator')
        if 'submit' in principal.roles:
            roles.add('session_submission')
        if principal.full_access:
            roles.add('session_manager')
        if principal.read_access:
            roles.add('session_access')
    return data

def get_supplier_frameworks_info(code):
    supplier = Supplier.query.filter(
        Supplier.code == code
    ).first_or_404()

    service_counts = SupplierFramework.get_service_counts(code)

    supplier_frameworks = (
        SupplierFramework
        .query
        .options(
            joinedload('framework'),
            noload('framework.lots')
        )
        .filter(SupplierFramework.supplier == supplier)
        .all()
    )

    return jsonify(frameworkInterest=[
        framework.serialize({
            'drafts_count': service_counts.get((framework.framework_id, 'not-submitted'), 0),
            'complete_drafts_count': service_counts.get((framework.framework_id, 'submitted'), 0),
            'services_count': service_counts.get((framework.framework_id, 'published'), 0)
        })
        for framework in supplier_frameworks]
    )

def get_scheduled_notes(event):
    """Gets all notes of scheduled items inside an event"""
    def _sort_note_by(note):
        obj = note.object
        if hasattr(obj, 'start_dt'):
            return obj.start_dt, 0
        else:
            return obj.contribution.start_dt, obj.position

    tt_entries = (event.timetable_entries
                  .filter(TimetableEntry.type != TimetableEntryType.BREAK)
                  .options(joinedload('session_block').joinedload('contributions').joinedload('subcontributions'))
                  .options(joinedload('contribution').joinedload('subcontributions'))
                  .options(noload('break_'))
                  .all())
    # build a list of all the objects we need notes for. that way we can query
    # all notes in a single go afterwards instead of making the already-huge
    # timetable query even bigger.
    objects = set()
    for entry in tt_entries:
        objects.add(entry.object)
        if entry.type == TimetableEntryType.CONTRIBUTION:
            objects.update(sc for sc in entry.object.subcontributions if not sc.is_deleted)
        elif entry.type == TimetableEntryType.SESSION_BLOCK:
            for contrib in entry.object.contributions:
                objects.add(contrib)
                objects.update(sc for sc in contrib.subcontributions if not sc.is_deleted)
    notes = [x for x in event.all_notes.filter_by(is_deleted=False) if x.object in objects]
    return sorted(notes, key=_sort_note_by)

def get_application_by_id(application_id):
    application = (
        Application
        .query
        .filter(Application.id == application_id)
        .options(
            joinedload('supplier.domains'),
            joinedload('supplier.domains.assessments'),
            noload('supplier.domains.assessments.briefs')
        )
        .first_or_404()
    )

    if application.status == 'deleted':
        abort(404)

    # Maximum prices are used on the pricing page to encourage value for money
    result = Domain.query.all()
    domains = {'prices': {'maximum': {}}}
    domains['prices']['maximum'] = {domain.name: domain.price_maximum for domain in result}

    errors = ApplicationValidator(application).validate_all()

    return jsonify(
        application=application.serializable,
        domains=domains,
        application_errors=errors)

def get_events_with_linked_sessions(user, from_dt=None, to_dt=None):
    """Returns a dict with keys representing event_id and the values
    containing data about the user rights for sessions within the event

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    """
    query = (user.in_session_acls
             .options(load_only('session_id', 'roles', 'full_access', 'read_access'))
             .options(noload('*'))
             .options(contains_eager(SessionPrincipal.session).load_only('event_id'))
             .join(Session)
             .join(Event, Event.id == Session.event_id)
             .filter(~Session.is_deleted, ~Event.is_deleted, Event.starts_between(from_dt, to_dt)))
    data = defaultdict(set)
    for principal in query:
        roles = data[principal.session.event_id]
        if 'coordinate' in principal.roles:
            roles.add('session_coordinator')
        if 'submit' in principal.roles:
            roles.add('session_submission')
        if principal.full_access:
            roles.add('session_manager')
        if principal.read_access:
            roles.add('session_access')
    return data

def set_coauthors(self):
    # comment out the commit. this means coauthors made during this commit
    # session don't show up on this refresh, but doing it because it is so
    # much faster
    # safe_commit(db)

    # now go for it
    print u"running coauthors for {}".format(self.orcid_id)

    coauthor_orcid_id_query = u"""select distinct orcid_id from product
        where doi in (select doi from product where orcid_id='{}')""".format(self.orcid_id)
    rows = db.engine.execute(text(coauthor_orcid_id_query))

    # remove own orcid_id
    orcid_ids = [row[0] for row in rows if row[0] if row[0] != self.id]
    if not orcid_ids:
        return

    # don't load products or badges
    coauthors = Person.query.filter(Person.orcid_id.in_(orcid_ids)).options(orm.noload('*')).all()
    resp = {}
    for coauthor in coauthors:
        resp[coauthor.orcid_id] = {
            "name": coauthor.full_name,
            "id": coauthor.id,
            "orcid_id": coauthor.orcid_id,
            "num_posts": coauthor.num_posts,
        }
    self.coauthors = resp

def get_events_with_paper_roles(user, dt=None):
    """
    Get the IDs and PR roles of events where the user has any kind
    of paper reviewing privileges.

    :param user: A `User`
    :param dt: Only include events taking place on/after that date
    :return: A dict mapping event IDs to a set of roles
    """
    paper_roles = {'paper_manager', 'paper_judge', 'paper_content_reviewer', 'paper_layout_reviewer'}
    role_criteria = [EventPrincipal.has_management_role(role, explicit=True)
                     for role in paper_roles]
    query = (user.in_event_acls
             .join(Event)
             .options(noload('user'), noload('local_group'), load_only('event_id', 'roles'))
             .filter(~Event.is_deleted, Event.ends_after(dt))
             .filter(db.or_(*role_criteria)))
    return {principal.event_id: set(principal.roles) & paper_roles for principal in query}

def get_profile_stubs_from_url_slug(url_slug):
    # query_base = db.session.query(Profile).options(orm.lazyload('*'), orm.subqueryload(Profile.products))
    # query_base = db.session.query(Profile).options(orm.noload('*'), subqueryload("products").subqueryload("alias_rows"))
    query_base = Profile.query.options(
        orm.noload('*'),
        orm.subqueryload(Profile.products),
        orm.subqueryload(Profile.products, Product.biblio_rows),
        orm.subqueryload(Profile.products, Product.alias_rows))
    profile = query_base.filter(func.lower(Profile.url_slug) == func.lower(url_slug)).first()
    return profile

def applications_list_response(with_task_status=False, status=None):
    if status:
        applications = Application.query.options(
            joinedload('supplier'),
            noload('supplier.domains')
        ).filter(Application.status == status)
    else:
        applications = Application.query.filter(Application.status != 'deleted')

    return format_applications(applications, with_task_status)

def get_supplier(code):
    supplier = (
        Supplier
        .query
        .filter(
            Supplier.code == code,
            Supplier.status != 'deleted'
        )
        .options(
            joinedload('domains.domain'),
            noload('domains.supplier'),
            noload('domains.assessments'),
            joinedload('domains.recruiter_info')
        )
        .first_or_404()
    )
    supplier.get_service_counts()
    return jsonify(supplier=supplier.serializable)

def update_refsets():
    from models.person import Person

    print u"getting the badge percentile refsets...."

    # only get out the badge objects
    q = db.session.query(Person).options(
        Load(Person).load_only("campaign", "orcid_id"))
    q = q.options(orm.noload('*'))
    q = q.options(orm.subqueryload("badges"))

    # limit to just what we want for the refset
    q = refine_refset_query(q)

    # and do the get
    rows = q.all()

    print u"query finished, now set the values in the lists"

    refset_list_dict = defaultdict(list)
    for person in rows:
        for badge in person.badges:
            # print "BADGE", badge
            # handle the nones below, with the zeros
            if badge.value is not None:
                refset_list_dict[badge.name].append(badge.value)

    num_in_refset = num_people_in_refset()

    for name, unsorted_values in refset_list_dict.iteritems():
        print u"refreshing refset {}".format(name)

        assigner = get_badge_assigner(name)
        if assigner.pad_percentiles_with_zeros:
            # pad with zeros for all the people who didn't get the badge
            unsorted_values.extend([0] * (num_in_refset - len(unsorted_values)))

        # now sort
        refset_list_dict[name] = sorted(unsorted_values)

        # now pick out the cutoffs, minimum value at each of 100
        cutoffs = []
        for sublist in chunk_into_n_sublists(refset_list_dict[name], 100):
            sublist_values = sublist
            if sublist_values:
                cutoffs.append(min(sublist_values))

        this_badge_refset = Refset(name=name, cutoffs=cutoffs)
        print u"saving refset {} with cutoffs {}".format(name, cutoffs)
        db.session.merge(this_badge_refset)

    # and finally save it all
    safe_commit(db)

def auth_user():
    json_payload = get_json_from_request()
    json_has_required_keys(json_payload, ["authUsers"])
    json_payload = json_payload["authUsers"]
    validate_user_auth_json_or_400(json_payload)

    email_address = json_payload.get('email_address', None)
    if email_address is None:
        # will remove camel case email address with future api
        email_address = json_payload.get('emailAddress', None)

    user = User.query.options(
        joinedload('supplier'),
        noload('supplier.*'),
        joinedload('application'),
        noload('application.*'),
        noload('*')
    ).filter(
        User.email_address == email_address.lower()
    ).first()

    if user is None or (user.supplier and user.supplier.status == 'deleted'):
        return jsonify(authorization=False), 404
    elif encryption.authenticate_user(json_payload['password'], user) and user.active:
        user.logged_in_at = datetime.utcnow()
        user.failed_login_count = 0
        db.session.add(user)
        db.session.commit()

        validation_result = None
        if user.role == 'supplier':
            messages = supplier_business.get_supplier_messages(user.supplier_code, False)
            validation_result = (
                messages._asdict()
                if messages else None
            )
        return jsonify(users=user.serialize(), validation_result=validation_result), 200
    else:
        user.failed_login_count += 1
        db.session.add(user)
        db.session.commit()
        return jsonify(authorization=False), 403

def validate_user_password(cls, username, password):
    user = (DBSession.query(cls)
            .options(noload(cls.groups))
            .filter(cls.username == username.lower())
            .first())
    if user is None:
        return None
    manager = BCRYPTPasswordManager()
    if manager.check(user.credentials, password):
        return user
    return None

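# Companion sketch: how the credentials checked above are typically created,
# assuming cryptacular's BCRYPTPasswordManager (the import path is an
# assumption; the snippet above does not show its imports).
from cryptacular.bcrypt import BCRYPTPasswordManager

manager = BCRYPTPasswordManager()
hashed = manager.encode('s3cret')        # store this in user.credentials
assert manager.check(hashed, 's3cret')   # True only for the right password
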
def get_files(self, job_id):
    """
    Get the files within a job
    """
    owner = Session.query(Job.user_dn, Job.vo_name).filter(Job.job_id == job_id).first()
    if owner is None:
        raise HTTPNotFound('No job with the id "%s" has been found' % job_id)
    if not authorized(TRANSFER, resource_owner=owner[0], resource_vo=owner[1]):
        raise HTTPForbidden('Not enough permissions to check the job "%s"' % job_id)
    files = Session.query(File).filter(File.job_id == job_id).options(noload(File.retries))
    return files.all()

def _process(self):
    author_contribs = (Contribution.query.with_parent(self.event_new)
                       .join(ContributionPersonLink)
                       .options(noload('*'))
                       .options(joinedload('event_new'))
                       .options(load_only('id', 'title'))
                       .filter(ContributionPersonLink.id == self.author.id,
                               ContributionPersonLink.author_type != AuthorType.none)
                       .all())
    return WPEventDisplay.render_template('person_display.html', self._conf,
                                          author=self.author, contribs=author_contribs)

def search_applications(keyword):
    if not keyword:
        return applications_list_response(with_task_status=False)

    if keyword.isdigit():
        applications = Application.query.filter(Application.id == keyword)
    else:
        applications = Application.query.outerjoin(User)
        applications = applications.filter(or_(
            Application.data["name"].astext.ilike('%{}%'.format(keyword)),
            Application.data['contact_email'].astext.ilike('%{}%'.format(keyword)),
            User.email_address.ilike('%{}%'.format(keyword))
        ))

    applications = applications.options(
        noload("supplier"),
        noload("supplier.domains"),
        noload("supplier.domains.assessments")
    )
    return format_applications(applications, False)

def get_events_with_linked_event_persons(user, dt=None):
    """Returns a list of all events for which the user is an EventPerson

    :param user: A `User`
    :param dt: Only include events taking place on/after that date
    """
    query = (user.event_persons
             .options(load_only('event_id'))
             .options(noload('*'))
             .join(Event, Event.id == EventPerson.event_id)
             .filter(EventPerson.event_links.any())
             .filter(~Event.is_deleted, Event.ends_after(dt)))
    return {ep.event_id for ep in query}

def _rebuild_availability_ranges(context, subnets):
    """Rebuild availability ranges.

    This method is called only when there's no more IP available or by
    _update_subnet_allocation_pools. Calling _update_subnet_allocation_pools
    before calling this function deletes the IPAllocationPools associated
    with the subnet that is updating, which will result in deleting the
    IPAvailabilityRange too.
    """
    ip_qry = context.session.query(
        models_v2.IPAllocation).with_lockmode('update')
    # PostgreSQL does not support select...for update with an outer join.
    # No join is needed here.
    pool_qry = context.session.query(
        models_v2.IPAllocationPool).options(
            orm.noload('available_ranges')).with_lockmode('update')
    for subnet in sorted(subnets):
        LOG.debug("Rebuilding availability ranges for subnet %s", subnet)

        # Create a set of all currently allocated addresses
        ip_qry_results = ip_qry.filter_by(subnet_id=subnet['id'])
        allocations = netaddr.IPSet([netaddr.IPAddress(i['ip_address'])
                                     for i in ip_qry_results])

        for pool in pool_qry.filter_by(subnet_id=subnet['id']):
            # Create a set of all addresses in the pool
            poolset = netaddr.IPSet(netaddr.IPRange(pool['first_ip'],
                                                    pool['last_ip']))

            # Use set difference to find free addresses in the pool
            available = poolset - allocations

            # Generator compacts an ip set into contiguous ranges
            def ipset_to_ranges(ipset):
                first, last = None, None
                for cidr in ipset.iter_cidrs():
                    if last and last + 1 != cidr.first:
                        yield netaddr.IPRange(first, last)
                        first = None
                    first, last = first if first else cidr.first, cidr.last
                if first:
                    yield netaddr.IPRange(first, last)

            # Write the ranges to the db
            for ip_range in ipset_to_ranges(available):
                available_range = models_v2.IPAvailabilityRange(
                    allocation_pool_id=pool['id'],
                    first_ip=str(netaddr.IPAddress(ip_range.first)),
                    last_ip=str(netaddr.IPAddress(ip_range.last)))
                context.session.add(available_range)

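# Standalone illustration of the compaction step above, using the same
# netaddr library (the addresses are made up): subtracting allocated IPs
# from a pool yields an IPSet whose CIDR fragments the ipset_to_ranges
# generator merges back into contiguous IPRanges for storage.
import netaddr

pool = netaddr.IPSet(netaddr.IPRange('10.0.0.2', '10.0.0.20'))
allocated = netaddr.IPSet(['10.0.0.5', '10.0.0.11'])
available = pool - allocated
print(available.iter_cidrs())  # CIDR fragments, e.g. [10.0.0.2/31, 10.0.0.4/32, ...]
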
def query_model(session, sa_class, only_pk=False):
    """
    Returns a query for *sa_class* that doesn't load any relationship
    attribute.
    """
    opts = (noload('*'),)
    if only_pk:
        pk = get_pk(sa_class)
        opts += tuple(
            defer(prop.key)
            for prop in class_mapper(sa_class).iterate_properties
            if isinstance(prop, ColumnProperty)
            if prop.key != pk)
    return session.query(sa_class).options(*opts)

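# Usage sketch for query_model with a throwaway model; the User class and
# engine here are made up for illustration, and it assumes query_model and
# its get_pk helper from above are importable.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
# All relationships stay unloaded; with only_pk=True every non-PK column
# ('name' here) is deferred as well.
users = query_model(session, User, only_pk=True).all()
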
def get_events_with_linked_event_persons(user, from_dt=None, to_dt=None):
    """Returns a list of all events for which the user is an EventPerson

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    """
    query = (user.event_persons
             .options(load_only('event_id'))
             .options(noload('*'))
             .join(Event, Event.id == EventPerson.event_id)
             .filter(EventPerson.event_links.any())
             .filter(~Event.is_deleted, Event.starts_between(from_dt, to_dt)))
    return {ep.event_id for ep in query}

def list_board_topics(self, board_id):
    # board_exists = Boards.query.filter_by(id=board_id).first()
    if not board_id_exists(self.app.boards, board_id):
        abort(404)

    board_topics = self.Topics.query.\
        filter_by(board_id=board_id, reply_to_id='0').\
        order_by(self.Topics.created.desc()).\
        options(noload('children')).\
        limit(3).\
        all()

    topics_dump_json = self.app.posts.PostsSchema(many=True).dumps(board_topics).data
    return response(topics_dump_json, 200)

def get_events_managed_by(user, from_dt=None, to_dt=None):
    """Gets the IDs of events where the user has management privs.

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    event_date_filter = None
    if from_dt and to_dt:
        event_date_filter = IndexedEvent.start_date.between(from_dt, to_dt)
    elif from_dt:
        event_date_filter = IndexedEvent.start_date >= from_dt
    elif to_dt:
        event_date_filter = IndexedEvent.start_date <= to_dt
    query = (user.in_event_acls
             .join(Event)
             .options(noload('user'), noload('local_group'), load_only('event_id'))
             .filter(~Event.is_deleted)
             .filter(EventPrincipal.has_management_role('ANY')))
    if event_date_filter is not None:
        query = query.join(IndexedEvent, IndexedEvent.id == EventPrincipal.event_id)
        query = query.filter(event_date_filter)
    return {principal.event_id for principal in query}

def add_contrib_data():
    has_contrib = (EventPerson.contribution_links.any(
        ContributionPersonLink.contribution.has(~Contribution.is_deleted)))
    has_subcontrib = EventPerson.subcontribution_links.any(
        SubContributionPersonLink.subcontribution.has(db.and_(
            ~SubContribution.is_deleted,
            SubContribution.contribution.has(~Contribution.is_deleted))))
    query = (Event.query
             .options(load_only('id'))
             .options(noload('*'))
             .filter(~Event.is_deleted,
                     Event.ends_after(dt),
                     Event.persons.any((EventPerson.user_id == user.id) &
                                       (has_contrib | has_subcontrib))))
    for event in query:
        data[event.id].add('contributor')

def add_acl_data():
    query = (user.in_contribution_acls
             .options(load_only('contribution_id', 'permissions', 'full_access', 'read_access'))
             .options(noload('*'))
             .options(contains_eager(ContributionPrincipal.contribution).load_only('event_id'))
             .join(Contribution)
             .join(Event, Event.id == Contribution.event_id)
             .filter(~Contribution.is_deleted, ~Event.is_deleted, Event.ends_after(dt)))
    for principal in query:
        roles = data[principal.contribution.event_id]
        if 'submit' in principal.permissions:
            roles.add('contribution_submission')
        if principal.full_access:
            roles.add('contribution_manager')
        if principal.read_access:
            roles.add('contribution_access')

def create_project_post(project_id):
    form = PostForm()
    if form.validate_on_submit():
        project = Project.query.options(noload('posts')).get(project_id)
        uploaded_files = request.files.getlist("project_upload[]")
        title, body = (request.form.get(key) for key in ('title', 'body'))
        if not title:
            flash('Please provide a post title', 'danger')
        elif not body:
            flash('Please provide post content', 'danger')
        else:
            new_post = ProjectPost(title, body)
            project.posts.append(new_post)
            db.session.flush()
            try:
                db.session.commit()
            except SQLAlchemyError:
                flash('Error creating the post. Please try again later.', 'danger')
            else:
                if uploaded_files:
                    for f in uploaded_files:
                        if allowed_file(f.filename):
                            filename = secure_filename(f.filename)
                            save_path = os.path.join(app.config['UPLOAD_FOLDER'],
                                                     "project_post_uploads",
                                                     str(project_id),
                                                     str(new_post.id))
                            if not os.path.exists(save_path):
                                os.makedirs(save_path)
                            f.save(os.path.join(save_path, filename))
                members_email = []
                for user in project.members:
                    if not user == current_user:
                        members_email.append(user.email)
                print members_email
                send_email(current_user.email,
                           "{0} Discussion Board Update".format(project.name),
                           "email/project_board_update",
                           bcc=members_email,
                           current_user=current_user,
                           project=project,
                           title=title)
    return redirect(url_for('show_project', project_id=project_id))

def get_vthunders_by_ip_address(self, session, ip_address, vthunders=False):
    model_list = session.query(self.model_class).filter(
        self.model_class.ip_address == ip_address).filter(
        and_(
            self.model_class.status == "ACTIVE",
            or_(self.model_class.role == "STANDALONE",
                self.model_class.role == "MASTER")))
    if not vthunders:
        id_list = [model.id for model in model_list]
        return id_list
    else:
        model_list = model_list.options(noload('*'))
        return model_list.all()

def sort_list_tasks(list_id):
    req_data = request.get_json()
    user_id = g.user.get('id')
    order = req_data.get('order')
    if order is None:
        return jsonify(msg="Missing params"), 400
    requested_tasks = Tasks.query.options(noload('priority')).filter_by(
        list_id=list_id, user_id=user_id).all()
    for task in requested_tasks:
        task.position = order.index(task.id)
    db.session.commit()
    return jsonify(msg="Tasks sorted"), 200

def get_all_deleted_expiring(self, session, exp_age):
    expiry_time = datetime.datetime.utcnow() - exp_age

    query = session.query(self.model_class).filter(
        self.model_class.updated_at < expiry_time)
    if hasattr(self.model_class, 'status'):
        query = query.filter(or_(self.model_class.status == "USED_SPARE",
                                 self.model_class.status == consts.DELETED))
    else:
        query = query.filter_by(operating_status=consts.DELETED)
    # Do not load any relationship
    query = query.options(noload('*'))
    model_list = query.all()

    id_list = [model.id for model in model_list]
    return id_list

def send_test_ping_delivery(deliveree_uuid: str) -> Optional[requests.Response]:
    webhook = (models.Webhook.query
               .options(noload(models.Subscriber.subscriptions))
               .filter(models.Webhook.uuid == deliveree_uuid)
               .one())
    payload = {
        "delivered_for": marshal(webhook, schema.webhook),
        "event": {
            "type": "ping",
            "uuid": str(uuid.uuid4()),
            "timestamp": str(datetime.datetime.now(datetime.timezone.utc)),
        },
    }
    _post_process_payload(payload, webhook)
    if webhook.content_type == models.Webhook.ContentType.URLENCODED.value:
        urlencoded_payload = {}
        urlencoded_payload["delivered_for"] = json.dumps(payload["delivered_for"])
        urlencoded_payload["event"] = json.dumps(payload["event"])
        request = requests.Request("POST", webhook.url, data=urlencoded_payload)
    elif webhook.content_type == models.Webhook.ContentType.JSON.value:
        request = requests.Request("POST", webhook.url, json=payload)
    request = request.prepare()
    body = request.body
    if not isinstance(body, bytes):
        body = body.encode("utf-8")
    signature = hmac.new(bytes(webhook.secret, "utf-8"), body, hashlib.sha256)
    _inject_headers(
        request.headers,
        payload["event"]["type"],
        str(uuid.uuid4()),
        signature.hexdigest(),
    )
    try:
        with requests.Session() as session:
            return session.send(request, verify=webhook.verify_ssl, timeout=5)
    except Exception as e:
        logger.error(e)
        return None

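# Receiver-side counterpart (a sketch, not part of the snippet above):
# recompute the HMAC over the raw request body and compare in constant time.
# How the signature reaches the receiver depends on the unshown
# _inject_headers helper, so the header handling is assumed.
import hashlib
import hmac


def verify_signature(secret: str, body: bytes, signature_hex: str) -> bool:
    expected = hmac.new(secret.encode("utf-8"), body, hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, signature_hex)
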
def update_status(self, context, model, id, status):
    LOG.debug(_("update_status for %(model)s id %(id)s %(status)s"),
              {'model': model, 'id': id, 'status': status})
    with context.session.begin(subtransactions=True):
        if issubclass(model, models.LoadBalancer):
            try:
                model_db = (self._model_query(context, model).
                            filter(model.id == id).
                            options(orm.noload('vip_port')).
                            one())
            except exc.NoResultFound:
                raise loadbalancerv2.EntityNotFound(
                    name=models.LoadBalancer.NAME, id=id)
        else:
            model_db = self._get_resource(context, model, id)
        if model_db.status != status:
            model_db.status = status

def lecture_journal(context: LectureResource, request: Request) -> Response:
    lecture = context.model(noload("amendements"))
    settings = request.registry.settings
    refreshable = (lecture.refreshable_for("articles", settings)
                   or lecture.refreshable_for("amendements", settings))
    can_refresh = request.has_permission("refresh", context)
    refreshing = lecture.get_fetch_progress()
    allowed_to_refresh = refreshable and can_refresh and not refreshing
    return {
        "lecture": lecture,
        "dossier_resource": context.dossier_resource,
        "lecture_resource": context,
        "current_tab": "journal",
        "today": date.today(),
        "allowed_to_refresh": allowed_to_refresh,
    }

def post(self, song_id, **options):
    args = self.post_parser.parse_args()

    try:
        pajbot.web.utils.pleblist_login(args["password"], app.bot_config)
    except pajbot.exc.InvalidLogin as e:
        return {"error": str(e)}, 401

    with DBManager.create_session_scope() as db_session:
        song = db_session.query(PleblistSong).options(noload("*")).filter_by(id=song_id).one_or_none()
        if song is None:
            abort(404)

        db_session.delete(song)
        db_session.flush()

        return {"message": "GOT EM"}, 200

def __GET_ById__Generic(properClass, properClassName, uri, isAdmin, id_):
    noloadOptions = []
    if not isAdmin:
        # we won't load unnecessary data, so we use the __publicNoLoadOptions__
        # attribute to avoid loading children records
        for entry in properClass.__publicNoLoadOptions__:
            noloadOptions.append(noload(entry))

    # this route has a specific behavior for bookable slots & covid tracking,
    # so we make sure we're not in one of these contexts

    # retrieve what should be given within the body
    received_request = []
    try:
        if request.json is not None:
            received_request = ujson.loads(request.json)
    except Exception:
        traceback.print_exc()

    request_filter = []
    request_filter.append(properClass.id == id_)

    # if the object is a bookedslot, then the confirmation code must also be
    # given in the request
    if properClassName == 'bookedslots':
        if 'confirmationCode' not in received_request:
            raise Exception("Error, confirmation code is mandatory to display a booked slot.")
        else:
            request_filter.append(properClass.confirmationCode == received_request['confirmationCode'])

    # load objects with or without children records (the option list must be
    # unpacked, since Query.options takes positional arguments)
    objects = properClass.query.filter(and_(*request_filter)).options(*noloadOptions).first_or_404()

    # if we are using an admin then we will display all fields; if not, we
    # will remove unnecessary fields
    if isAdmin:
        try:
            result_dict = [item.serialize for item in objects]
        except TypeError:
            result_dict = objects.serialize
    else:
        try:
            result_dict = [item.serialize_public for item in objects]
        except TypeError:
            result_dict = objects.serialize_public

    logs.logger.info(result_dict)
    return make_response(jsonify(result_dict), 200)

def profile_comparison_cluster(cluster_id, normalize=0):
    """
    This will get all the expression profiles for members of given cluster
    and plot them

    :param cluster_id: internal id of the cluster to visualize
    :param normalize: if the plot should be normalized (against max value of
        each series)
    """
    cluster = CoexpressionCluster.query.get(cluster_id)

    associations = SequenceCoexpressionClusterAssociation.query.\
        filter_by(coexpression_cluster_id=cluster_id).\
        options(noload(SequenceCoexpressionClusterAssociation.sequence)).\
        all()

    probes = [a.probe for a in associations]

    # get max 51 profiles, only show the first 50 (the extra one is fetched
    # to trigger the warning)
    profiles = ExpressionProfile.get_profiles(
        cluster.method.network_method.species_id, probes, limit=51)

    if len(profiles) > 50:
        flash(Markup(
            ("Too many profiles in this cluster; only showing the <strong>first 50</strong>. <br />" +
             "<strong>Note:</strong> The <a href='%s'>heatmap</a> can be used with more genes and " +
             "allows downloading the data for local analysis.")
            % url_for('heatmap.heatmap_cluster', cluster_id=cluster_id)), 'warning')

    profile_chart = prepare_profiles(
        profiles[:50],
        True if normalize == 1 else False,
        ylabel='TPM' + (' (normalized)' if normalize == 1 else ''))

    # Get table in base64 format for download
    data = base64.encodebytes(
        prepare_profiles_download(
            profiles[:50],
            True if normalize == 1 else False).encode('utf-8'))

    return render_template("expression_profile_comparison.html",
                           profiles=json.dumps(profile_chart),
                           normalize=normalize,
                           cluster=cluster,
                           data=data.decode('utf-8'))

def create_core_post(core_id):
    form = PostForm()
    if form.validate_on_submit():
        core = Core.query.options(noload('posts')).get(core_id)
        title, body = (request.form.get(key) for key in ('title', 'body'))
        if not title:
            flash('Please provide a post title', 'danger')
        elif not body:
            flash('Please provide post content', 'danger')
        else:
            core.posts.append(CorePost(title, body))
            try:
                db.session.commit()
            except SQLAlchemyError:
                flash('Error creating the post. Please try again later.', 'danger')
    return redirect(url_for('show_core', core_id=core_id))

def sequence_fasta_protein(sequence_id):
    """
    Returns the protein sequence as a downloadable fasta file

    :param sequence_id: ID of the sequence
    :return: Response with the fasta file
    """
    current_sequence = Sequence.query\
        .options(undefer('coding_sequence'))\
        .options(noload('xrefs'))\
        .get_or_404(sequence_id)

    fasta = ">" + current_sequence.name + "\n" + current_sequence.protein_sequence + "\n"
    response = make_response(fasta)
    response.headers["Content-Disposition"] = "attachment; filename=" + current_sequence.name + ".protein.fasta"
    response.headers['Content-type'] = 'text/plain'

    return response

def edit_list_title(list_id):
    req_data = request.get_json()
    user_id = g.user.get('id')
    title = req_data.get('title')
    requested_list = Lists.query.options(noload('tasks')).filter_by(id=list_id).first()
    if requested_list is None:
        return jsonify(msg="List not found"), 404
    if requested_list.user_id != user_id:
        return jsonify(msg="You can't perform this action."), 403
    if requested_list.title == title:
        return jsonify("New title can't be equal to the actual one"), 400
    requested_list.title = title
    db.session.commit()
    return jsonify(msg="Title updated"), 200

def _score_base(db: Session, batch: int, dept: str = None, sem: int = None):
    scheme = get_scheme(db, batch)
    usns = db.query(Student.Usn).filter(Student.Batch == batch)
    students = (db.query(Student)
                .filter(Student.Batch == batch)
                .options(noload("Scores")))
    if dept is not None:
        usns = usns.filter(Student.Department == dept)
        students = students.filter(Student.Department == dept)
    subcodes = db.query(Subject.Code).filter(Subject.Scheme == scheme)
    if sem is not None:
        subcodes = subcodes.filter(Subject.Semester == sem)
    scores = db.query(Score).filter(Score.Usn.in_(usns),
                                    Score.SubjectCode.in_(subcodes))
    return (students, scores)

def update_task_title(task_id):
    req_data = request.get_json()
    new_title = req_data.get('title')
    user_id = g.user.get('id')
    if new_title is None:
        return jsonify(msg="Missing param: title"), 400
    requested_task = Tasks.query.options(
        noload('priority')).filter_by(id=task_id).first()
    if user_id != requested_task.user_id:
        return jsonify(msg="You can't perform this action."), 403
    elif requested_task.title == new_title:
        return jsonify(msg="New title can't be the same as the old one."), 400
    requested_task.title = new_title
    db.session.commit()
    return jsonify(msg="Task updated"), 200

def update_status(self, context, model, id, status, status_description=None):
    with context.session.begin(subtransactions=True):
        if issubclass(model, Vip):
            try:
                v_db = (self._model_query(context, model).
                        filter(model.id == id).
                        options(orm.noload('port')).
                        one())
            except exc.NoResultFound:
                raise loadbalancer.VipNotFound(vip_id=id)
        else:
            v_db = self._get_resource(context, model, id)
        if v_db.status != status:
            v_db.status = status
        # update status_description in two cases:
        # - new value is passed
        # - old value is not None (needs to be updated anyway)
        if status_description or v_db['status_description']:
            v_db.status_description = status_description

def update_status(self, context, model, id, provisioning_status=None,
                  operating_status=None):
    with context.session.begin(subtransactions=True):
        if issubclass(model, models.LoadBalancer):
            try:
                model_db = (self._model_query(context, model).
                            filter(model.id == id).
                            options(orm.noload('vip_port')).
                            one())
            except exc.NoResultFound:
                raise loadbalancerv2.EntityNotFound(
                    name=models.LoadBalancer.NAME, id=id)
        else:
            model_db = self._get_resource(context, model, id)
        if provisioning_status and (model_db.provisioning_status !=
                                    provisioning_status):
            model_db.provisioning_status = provisioning_status
        if (operating_status and hasattr(model_db, 'operating_status') and
                model_db.operating_status != operating_status):
            model_db.operating_status = operating_status

def convert_dict_to_marshmallow_result(
    data: Mapping[str, Any],
    identifier: str,
    identifier_column: str,
    domain_model: DomainModel,
    sqlalchemy_model: DeclarativeMeta,
    schema: ModelSchema,
    patch_data: Optional[Mapping[str, Any]] = None,
) -> Union[ModelSchema, List[str]]:
    result = sqlalchemy_model.query.filter_by(
        **{identifier_column: identifier}).options(noload('*')).first()

    if patch_data is not None:
        data = {**data, **patch_data}

    joined_entity_ids_or_errors = create_joined_entity_id_map(
        domain_model,
        data,
    )
    if isinstance(joined_entity_ids_or_errors, list):
        return joined_entity_ids_or_errors

    data = convert_properties_to_sqlalchemy_properties(
        domain_model,
        joined_entity_ids_or_errors,
        json_dict_to_python_dict(preserve_user_json(data)),
    )

    if result is not None:
        # don't use the 'id' from the json request
        data = {**data, **{'id': result.id}}

    marshmallow_result = schema.load(
        json_dict_to_python_dict(data),
        session=db.session,
        instance=result,
    )

    return marshmallow_result

def list_users(self, customer_id: int, limit=20, offset=0, email=None, name=None):
    """
    List the users from the API.

    Args:
        customer_id (int): The corresponding customer id.
        limit (int): The max number of returned users.
        offset (int): The cursor.
        email (string): An email to filter on.
        name (string): A name to filter on.

    Returns:
        (list of dict, boolean): A list of user representations. The boolean
        stands for if there is more to fetch.
    """
    session = self.get_session()
    columns = [u"id", u"email", u"name", u"active"]

    filters = []
    if email is not None:
        filters.append(User.email.like(u"%{}%".format(email)))
    if name is not None:
        filters.append(User.name.like(u"%{}%".format(name)))
    filters.append(User.customer == customer_id)

    users = session.query(User)\
        .options(load_only(*columns), noload(u"roles"))\
        .filter(or_(*filters))\
        .offset(offset)\
        .limit(limit + 1)

    if users.count() > limit:
        users = users[:-1]
        has_next = True
    else:
        has_next = False

    return [self.to_user_dict(user, with_roles=False) for user in users], has_next

def update_task_description(task_id):
    req_data = request.get_json()
    description = req_data.get('description')
    user_id = g.user.get('id')
    if description is None:
        return jsonify(msg="Missing param: description"), 400
    requested_task = Tasks.query.options(
        noload('priority')).filter_by(id=task_id).first()
    if user_id != requested_task.user_id:
        return jsonify(msg="You can't perform this action."), 403
    elif requested_task.description == description:
        return jsonify(
            msg="New description can't be the same as the old one."), 400
    requested_task.description = description
    db.session.commit()
    return jsonify(msg="Task updated"), 200

def get_suppliers_by_name_keyword(self, keyword, framework_slug=None, category=None):
    query = (db.session.query(Supplier)
             .filter(Supplier.name.ilike('%{}%'.format(keyword.encode('utf-8'))))
             .filter(Supplier.status != 'deleted')
             .options(
                 joinedload(Supplier.frameworks),
                 joinedload(Supplier.domains),
                 joinedload(Supplier.prices),
                 joinedload('domains.domain'),
                 joinedload('prices.service_role'),
                 joinedload('frameworks.framework'),
                 noload('frameworks.framework.lots'),
                 raiseload('*')))

    if framework_slug:
        query = query.outerjoin(SupplierFramework).outerjoin(Framework)
        query = query.filter(Framework.slug == framework_slug)

    if category:
        query = query.outerjoin(SupplierDomain)
        query = query.filter(SupplierDomain.domain_id == category).filter(SupplierDomain.status == 'assessed')

    # the ordered/limited query must be reassigned; otherwise order_by/limit
    # return a new query that is silently discarded
    query = query.order_by(Supplier.name.asc()).limit(20)

    return query.all()

def committee_meeting_list(id):
    cte = Committee.query.get(id)
    if not cte:
        abort(404)

    query = CommitteeMeeting.query.filter(
        CommitteeMeeting.committee == cte).order_by(desc(CommitteeMeeting.date))

    # defer some expensive fields if they're not needed
    fields = get_api_fields()
    if fields:
        for f in ['body', 'summary']:
            if f not in fields:
                query = query.options(defer(f))
        if not any(f == 'committee' or f.startswith('committee.') for f in fields):
            query = query.options(noload('committee'))

    return api_list_items(query, CommitteeMeetingSchema)

def get_student(db: Session, usn: str) -> StudentReport:
    """Obtain Student From USN

    Args:
        db (Session): SQLAlchemy Session
        usn (str): USN Code

    Raises:
        NoResultFound

    Returns:
        StudentReport: Student Object
    """
    res = (
        db.query(Student)
        .options(noload(Student.Scores))
        .filter(Student.Usn == usn)
        .one()
    )
    rep = StudentReport.from_orm(res)
    return rep

def patch(self, relatedBookUuid):  # type: ignore
    id_validation_errors = related_book_schema.validate(
        {'related_book_uuid': relatedBookUuid},
        session=db.session,
        partial=True)
    if id_validation_errors:
        abort(404)

    result: Optional[RelatedBook] = RelatedBook.query.filter_by(related_book_uuid=relatedBookUuid)\
        .options(noload('*')).first()  # noqa: E501
    if result is None:
        abort(404)

    data = request.get_json(force=True)
    if not isinstance(data, dict):
        abort(400)

    marshmallow_schema_or_errors = convert_dict_to_marshmallow_result(
        data=json_dict_to_python_dict(model_to_dict(result)),
        identifier=relatedBookUuid,
        identifier_column='related_book_uuid',
        domain_model=related_book_domain_model,
        sqlalchemy_model=RelatedBook,
        schema=related_book_schema,
        patch_data=data,
    )

    if isinstance(marshmallow_schema_or_errors, list):
        abort(400, marshmallow_schema_or_errors)
    if marshmallow_schema_or_errors.errors:
        abort(400, python_dict_to_json_dict(marshmallow_schema_or_errors.errors))

    db.session.add(marshmallow_schema_or_errors.data)
    db.session.commit()

    return python_dict_to_json_dict(
        model_to_dict(marshmallow_schema_or_errors.data)), 200

def export_xlsx(context: LectureResource, request: Request) -> Response:
    lecture = context.model(noload("amendements"))
    nums, article_param = parse_params(request, lecture=lecture)
    if article_param == "all":
        amendements = (
            DBSession.query(Amendement)
            .join(Article)
            .filter(
                Amendement.lecture == lecture,
                Amendement.num.in_(nums),  # type: ignore
            )
            .options(USER_CONTENT_OPTIONS, LOCATION_OPTIONS)
        )
    else:
        article_type, article_num, article_mult, article_pos = article_param.split(".")
        amendements = (
            DBSession.query(Amendement)
            .filter(
                Article.pk == Amendement.article_pk,
                Amendement.lecture == lecture,
                Article.type == article_type,
                Article.num == article_num,
                Article.mult == article_mult,
                Article.pos == article_pos,
                Amendement.num.in_(nums),  # type: ignore
            )
            .options(USER_CONTENT_OPTIONS, LOCATION_OPTIONS)
        )
    expanded_amendements = list(Batch.expanded_batches(amendements))

    with NamedTemporaryFile() as file_:
        tmp_file_path = os.path.abspath(file_.name)
        write_xlsx(lecture, tmp_file_path, request, amendements=expanded_amendements)
        return write_response(
            tmp_file_path=tmp_file_path,
            fmt="xlsx",
            lecture=lecture,
            article_param=article_param,
            amendements=expanded_amendements,
        )

def delete_application(application_id):
    """
    Delete an Application
    :param application_id:
    :return:
    """
    updater_json = validate_and_return_updater_request()

    application = Application.query.options(
        noload('supplier')
    ).filter(
        Application.id == application_id
    ).first_or_404()

    db.session.add(AuditEvent(
        audit_type=AuditTypes.delete_application,
        user=updater_json['updated_by'],
        data={},
        db_object=application
    ))

    application.status = 'deleted'

    users = User.query.filter(
        User.application_id == application_id
    ).all()
    # this should go back to the previous application id, not just None.
    for user in users:
        user.application = None

    try:
        db.session.commit()
        publish_tasks.application.delay(
            publish_tasks.compress_application(application),
            'deleted'
        )
    except IntegrityError as e:
        db.session.rollback()
        abort(400, "Database Error: {0}".format(e))

    return jsonify(message="done"), 200

def get_scheduled_notes(event):
    """Gets all notes of scheduled items inside an event"""
    tt_entries = (event.timetable_entries
                  .filter(TimetableEntry.type != TimetableEntryType.BREAK)
                  .options(joinedload('session_block').joinedload('contributions').joinedload('subcontributions'))
                  .options(joinedload('contribution').joinedload('subcontributions'))
                  .options(noload('break_'))
                  .all())
    # build a list of all the objects we need notes for. that way we can query
    # all notes in a single go afterwards instead of making the already-huge
    # timetable query even bigger.
    objects = set()
    for entry in tt_entries:
        objects.add(entry.object)
        if entry.type == TimetableEntryType.CONTRIBUTION:
            objects.update(sc for sc in entry.object.subcontributions if not sc.is_deleted)
        elif entry.type == TimetableEntryType.SESSION_BLOCK:
            for contrib in entry.object.contributions:
                objects.add(contrib)
                objects.update(sc for sc in contrib.subcontributions if not sc.is_deleted)
    notes = [x for x in event.all_notes.filter_by(is_deleted=False) if x.object in objects]
    return sorted(notes, key=lambda x: x.object.start_dt)