def test_lazyload(self):
    self.assertEqual(
        str(self.db.query(Foo).lazyload('bars')),
        str(self.db.query(Foo).options(orm.lazyload('bars')))
    )

    self.assertEqual(
        str(self.db.query(Foo).lazyload('bars', 'bazs')),
        str(self.db.query(Foo)
            .options(orm.lazyload('bars').lazyload('bazs')))
    )

    self.assertEqual(
        str(self.db.query(Foo).lazyload(Foo.bars)),
        str(self.db.query(Foo).options(orm.lazyload(Foo.bars)))
    )

    self.assertEqual(
        str(self.db.query(Foo).lazyload(Foo.bars, Bar.bazs)),
        str(self.db.query(Foo)
            .options(orm.lazyload(Foo.bars).lazyload(Bar.bazs)))
    )

    self.assertEqual(
        str(self.db.query(Foo)
            .lazyload('bars', options=[LoadOption('lazyload', 'bazs')])),
        str(self.db.query(Foo)
            .options(orm.lazyload('bars').lazyload('bazs')))
    )

def render(self, session, **arguments):
    q = session.query(Chassis)

    q = q.options(subqueryload('model'),
                  joinedload('model.machine_specs'),
                  subqueryload('location'),
                  joinedload('slots'),
                  subqueryload('slots.machine'),
                  # A rare case when we don't need primary name/host
                  lazyload('slots.machine.primary_name'),
                  lazyload('slots.machine.host'),
                  subqueryload('interfaces'),
                  joinedload('interfaces.assignments'),
                  joinedload('interfaces.assignments.network'),
                  joinedload('interfaces.assignments.dns_records'))

    # Prefer the primary name for ordering
    q = q.outerjoin(DnsRecord,
                    (Fqdn, DnsRecord.fqdn_id == Fqdn.id),
                    DnsDomain)
    q = q.options(contains_eager('primary_name'),
                  contains_eager('primary_name.fqdn'),
                  contains_eager('primary_name.fqdn.dns_domain'))
    q = q.order_by(Fqdn.name, DnsDomain.name, Chassis.label)

    return q.all()

def _finish_print_pricelist(dept_id, include_all):
    # We want all items currently in stock, restricted by department
    # if dept_id is not None
    l = td.s.query(StockType)\
            .select_from(StockItem)\
            .filter(StockItem.finished == None)\
            .join(StockType)\
            .options(lazyload(StockType.department))\
            .options(lazyload(StockType.unit))\
            .group_by(StockType)\
            .order_by(StockType.dept_id, StockType.manufacturer,
                      StockType.name)
    if dept_id:
        l = l.filter(StockType.dept_id == dept_id)
    if not include_all:
        l = l.filter(StockItem.stocklineid != None)
    l = l.all()
    with printer.driver as d:
        d.printline("\t{}".format(tillconfig.pubname), emph=1)
        d.printline()
        d.printline("\tPrice List", colour=1)
        d.printline()
        current_dept = None
        for st in l:
            if st.department != current_dept:
                if current_dept is not None:
                    d.printline()
                current_dept = st.department
                d.printline(current_dept.description, emph=1)
            d.printline("{}\t\t{}{}".format(
                st.descriptions[0], tillconfig.currency, st.pricestr))
        d.printline()
        d.printline("\tEnd of list")

def test_default_forms(session):
    """Check that each pokemon has one default form and each species
    has one default pokemon."""
    q = session.query(tables.Pokemon)
    q = q.join(tables.PokemonForm)
    q = q.filter(tables.PokemonForm.is_default == True)
    q = q.options(lazyload('*'))
    q = q.group_by(tables.Pokemon)
    q = q.add_columns(func.count(tables.PokemonForm.id))

    for pokemon, num_default_forms in q:
        if num_default_forms == 0:
            pytest.fail("pokemon %s has no default forms" % pokemon.name)
        elif num_default_forms > 1:
            pytest.fail("pokemon %s has %d default forms" %
                        (pokemon.name, num_default_forms))

    q = session.query(tables.PokemonSpecies)
    q = q.join(tables.Pokemon)
    q = q.filter(tables.Pokemon.is_default == True)
    q = q.options(lazyload('*'))
    q = q.group_by(tables.PokemonSpecies)
    q = q.add_columns(func.count(tables.Pokemon.id))

    for species, num_default_pokemon in q:
        if num_default_pokemon == 0:
            pytest.fail("species %s has no default pokemon" % species.name)
        elif num_default_pokemon > 1:
            pytest.fail("species %s has %d default pokemon" %
                        (species.name, num_default_pokemon))

def get_recipient_users(self):
    groups = []

    if self.daily_schedule_subscribers.data:
        log.info("Email recipients includes daily schedule subscribers")
        groups.append(
            User.query
                .options(
                    lazyload(User.organisation),
                    lazyload(User.committee_alerts),
                )
                .filter(User.subscribe_daily_schedule == True)
                .filter(User.confirmed_at != None)
                .all())

    if self.committee_ids.data:
        log.info("Email recipients includes subscribers for these committees: %s"
                 % self.committee_ids.data)
        user_ids = db.session\
            .query(distinct(user_committee_alerts.c.user_id))\
            .filter(user_committee_alerts.c.committee_id.in_(self.committee_ids.data))\
            .all()
        user_ids = [u[0] for u in user_ids]
        groups.append(
            User.query
                .options(
                    lazyload(User.organisation),
                    lazyload(User.committee_alerts),
                )
                .filter(User.id.in_(user_ids))
                .filter(User.confirmed_at != None)
                .all())

    return set(u for u in chain(*groups))

def enter(self):
    if self.wfield.f == '' or self.mfield.f == '' or self.minfield.f == '':
        ui.infopopup(["You must fill in all three fields."], title="Error")
        return
    weeks_ahead = int(self.wfield.f)
    months_behind = int(self.mfield.f)
    min_sale = float(self.minfield.f)
    ahead = datetime.timedelta(days=weeks_ahead * 7)
    behind = datetime.timedelta(days=months_behind * 30.4)
    dept = self.deptfield.read()
    self.dismiss()
    q = td.s.query(StockType, func.sum(StockOut.qty) / behind.days)\
            .join(StockItem)\
            .join(StockOut)\
            .options(lazyload(StockType.department))\
            .options(lazyload(StockType.unit))\
            .options(undefer(StockType.instock))\
            .filter(StockOut.removecode_id == 'sold')\
            .filter((func.now() - StockOut.time) < behind)\
            .having(func.sum(StockOut.qty) / behind.days > min_sale)\
            .group_by(StockType)
    if dept:
        q = q.filter(StockType.dept_id == dept.id)
    r = q.all()
    f = ui.tableformatter(' l r r r ')
    lines = [f(st.format(),
               '{:0.1f}'.format(sold),
               st.instock,
               '{:0.1f}'.format(sold * ahead.days - st.instock))
             for st, sold in r]
    lines.sort(key=lambda l: float(l.fields[3]), reverse=True)
    header = [f('Name', 'Sold per day', 'In stock', 'Buy')]
    ui.listpopup(lines,
                 header=header,
                 title="Stock to buy for next {} weeks".format(weeks_ahead),
                 colour=ui.colour_info,
                 show_cursor=False,
                 dismiss=keyboard.K_CASH)

def users_iterate(self, include_unsubscribed=False):
    if include_unsubscribed:
        for user in self.session.query(User)\
                .options(lazyload('*')).yield_per(10):
            yield user
    else:
        for user in self.session.query(User)\
                .options(lazyload('*'))\
                .filter(User.unsubscribed == False).yield_per(10):
            yield user

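# A side note on the pattern above, as a self-contained sketch: yield_per()
# streams rows in batches, which is unsafe to combine with collection eager
# loaders, so lazyload('*') downgrades every relationship to lazy loading
# for the query. The User model below is a stand-in invented for this
# sketch (not the application's real model); assumes SQLAlchemy 1.4.
from sqlalchemy import Boolean, Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, lazyload

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    unsubscribed = Column(Boolean, default=False)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

session = Session(engine)
session.add_all([User(name='a'), User(name='b', unsubscribed=True)])
session.commit()

# lazyload('*') disables any configured eager loaders, keeping yield_per
# safe: rows arrive in batches of 10 instead of being loaded all at once.
for user in (session.query(User)
             .options(lazyload('*'))
             .filter(User.unsubscribed == False)
             .yield_per(10)):
    print(user.name)
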
def test_state_noload_to_lazy(self):
    """Behavioral test to verify the current activity of
    loader callables."""

    users, Address, addresses, User = (
        self.tables.users,
        self.classes.Address,
        self.tables.addresses,
        self.classes.User,
    )

    mapper(User, users,
           properties={"addresses": relationship(Address, lazy="noload")})
    mapper(Address, addresses)

    sess = create_session()
    u1 = sess.query(User).options(lazyload(User.addresses)).first()
    assert isinstance(
        attributes.instance_state(u1).callables["addresses"],
        strategies.LoadLazyAttribute,
    )

    # expire, it stays
    sess.expire(u1)
    assert isinstance(
        attributes.instance_state(u1).callables["addresses"],
        strategies.LoadLazyAttribute,
    )

    # load over it.  callable goes away.
    sess.query(User).first()
    assert "addresses" not in attributes.instance_state(u1).callables

    sess.expunge_all()
    u1 = sess.query(User).options(lazyload(User.addresses)).first()
    sess.expire(u1, ["addresses"])
    assert isinstance(
        attributes.instance_state(u1).callables["addresses"],
        strategies.LoadLazyAttribute,
    )

    # load the attr, goes away
    u1.addresses
    assert "addresses" not in attributes.instance_state(u1).callables

def render(self, session, cluster, **arguments):
    q = session.query(self.query_class)

    vm_q = session.query(VirtualMachine)
    vm_q = vm_q.join(ClusterResource, Cluster)

    q = q.filter_by(name=cluster)
    vm_q = vm_q.filter_by(name=cluster)

    vm_q = vm_q.options(joinedload('machine'),
                        joinedload('machine.primary_name'),
                        joinedload('machine.primary_name.fqdn'),
                        lazyload('machine.host'))

    q = q.options(subqueryload('_hosts'),
                  lazyload('_hosts.cluster'),
                  joinedload('_hosts.host'),
                  joinedload('_hosts.host.hardware_entity'),
                  subqueryload('_metacluster'),
                  joinedload('_metacluster.metacluster'),
                  joinedload('resholder'),
                  subqueryload('resholder.resources'),
                  subqueryload('service_bindings'),
                  subqueryload('allowed_personalities'))

    q = q.order_by(self.query_class.name)
    dbclusters = q.all()
    if cluster and not dbclusters:
        raise NotFoundException("Cluster %s not found." % cluster)

    # Manual eager-loading of VM resources. All this code does is make sure
    # the data is pinned in the session's cache
    machines = {}
    for vm in vm_q:
        machines[vm.machine.machine_id] = vm

    return dbclusters

def stockcheck(request, info, session):
    buylist = []
    depts = session.query(Department).order_by(Department.id).all()

    if request.method == "POST":
        form = StockCheckForm(depts, request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            ahead = datetime.timedelta(days=cd["weeks_ahead"] * 7)
            behind = datetime.timedelta(days=cd["months_behind"] * 30.4)
            min_sale = cd["minimum_sold"]
            dept = int(cd["department"])
            q = (
                session.query(StockType, func.sum(StockOut.qty) / behind.days)
                .join(StockItem)
                .join(StockOut)
                .options(lazyload(StockType.department))
                .options(lazyload(StockType.unit))
                .options(undefer(StockType.instock))
                .filter(StockOut.removecode_id == "sold")
                .filter((func.now() - StockOut.time) < behind)
                .filter(StockType.dept_id == dept)
                .having(func.sum(StockOut.qty) / behind.days > min_sale)
                .group_by(StockType)
            )
            r = q.all()
            buylist = [(st,
                        "{:0.1f}".format(sold),
                        "{:0.1f}".format(sold * ahead.days - st.instock))
                       for st, sold in r]
            buylist.sort(key=lambda l: float(l[2]), reverse=True)
    else:
        form = StockCheckForm(depts)

    return ("stockcheck.html", {"form": form, "buylist": buylist})

def page(cls, _db, title, url, annotator=None, use_materialized_works=True):
    """Create a feed of content to preload on devices."""
    configured_content = Configuration.policy(Configuration.PRELOADED_CONTENT)

    identifiers = [Identifier.parse_urn(_db, urn)[0]
                   for urn in configured_content]
    identifier_ids = [identifier.id for identifier in identifiers]

    if use_materialized_works:
        from core.model import MaterializedWork

        q = _db.query(MaterializedWork)
        q = q.filter(MaterializedWork.primary_identifier_id.in_(identifier_ids))

        # Avoid eager loading of objects that are contained in the
        # materialized view.
        q = q.options(
            lazyload(MaterializedWork.license_pool, LicensePool.data_source),
            lazyload(MaterializedWork.license_pool, LicensePool.identifier),
            lazyload(MaterializedWork.license_pool, LicensePool.edition),
        )
    else:
        q = _db.query(Work).join(Work.primary_edition)
        q = q.filter(Edition.primary_identifier_id.in_(identifier_ids))

    works = q.all()
    feed = cls(_db, title, url, works, annotator)

    annotator.annotate_feed(feed, None)
    content = unicode(feed)
    return content

def test_invocation_systemwide_loaders(self):
    baked.bake_lazy_loaders()
    try:
        User, Address = self._o2m_fixture()

        sess = Session()
        q = sess.query(User).options(lazyload(User.addresses))
        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # invoked
            is_(
                el.mock_calls[0][1][1],
                u1._sa_instance_state
            )
    finally:
        baked.unbake_lazy_loaders()

    clear_mappers()
    User, Address = self._o2m_fixture()
    sess = Session()
    q = sess.query(User).options(lazyload(User.addresses))
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # not invoked
        eq_(el.mock_calls, [])

def get_query(self):
    return Document.query\
        .options(
            joinedload(Document.sources),
            joinedload(Document.fairness),
            joinedload(Document.medium),
            lazyload('sources.person'),
            lazyload('sources.unnamed_gender'),
            lazyload('sources.unnamed_race'))

def minister_questions_combined():
    """
    Mixture of old QuestionReplies and new CommitteeQuestion objects
    folded together in date order to support pagination.
    """
    filters = get_filters()

    # To make pagination possible, we grab a combined list of IDs,
    # paginate that list, and then fetch the details.

    # get a combined list of IDs
    q1 = db.session.query(CommitteeQuestion.id,
                          CommitteeQuestion.date.label("date"),
                          literal_column("'cq'").label("type"))
    for f in filters:
        q1 = q1.filter_by(**f)

    q2 = db.session.query(QuestionReply.id,
                          QuestionReply.start_date.label("date"),
                          literal_column("'qr'").label("type"))
    for f in filters:
        q2 = q2.filter_by(**f)

    query = q1.union_all(q2).order_by(desc("date"))
    query, count, next = paginate_request_query(query)

    # pull out the IDs we want
    cq_ids = [c[0] for c in query if c[2] == 'cq']
    qr_ids = [c[0] for c in query if c[2] == 'qr']

    # get committee questions
    query = CommitteeQuestion.list()\
        .filter(CommitteeQuestion.id.in_(cq_ids))\
        .order_by(CommitteeQuestion.date.desc())\
        .options(
            lazyload('committee'),
            lazyload('minister'),
            joinedload('asked_by_member'),
            lazyload('asked_by_member.memberships'))
    for f in filters:
        query = query.filter_by(**f)
    objects = query.all()

    # get question reply objects
    query = QuestionReply.list()\
        .filter(QuestionReply.id.in_(qr_ids))\
        .order_by(QuestionReply.start_date.desc())\
        .options(
            lazyload('committee'),
            lazyload('minister'))
    for f in filters:
        query = query.filter_by(**f)

    # mash them together
    objects.extend(query.all())

    # sort
    objects.sort(key=lambda x: getattr(x, 'date', getattr(x, 'start_date', None)),
                 reverse=True)

    out = serializers.queryset_to_json(objects, count=count, next=next)
    return send_api_response(out)

def render(self, session, hostname, machine, cpuname, cpuvendor, cpuspeed,
           cpucount, memory, cluster, share, fullinfo, style, **arguments):
    if fullinfo or style != 'raw':
        q = search_hardware_entity_query(session, Machine, **arguments)
    else:
        q = search_hardware_entity_query(session, Machine.label, **arguments)
    if machine:
        q = q.filter_by(label=machine)
    if hostname:
        dns_rec = DnsRecord.get_unique(session, fqdn=hostname, compel=True)
        q = q.filter(Machine.primary_name_id == dns_rec.id)
    if cpuname or cpuvendor or cpuspeed is not None:
        subq = Cpu.get_matching_query(session, name=cpuname, vendor=cpuvendor,
                                      speed=cpuspeed, compel=True)
        q = q.filter(Machine.cpu_id.in_(subq))
    if cpucount is not None:
        q = q.filter_by(cpu_quantity=cpucount)
    if memory is not None:
        q = q.filter_by(memory=memory)
    if cluster:
        dbcluster = Cluster.get_unique(session, cluster, compel=True)
        if isinstance(dbcluster, MetaCluster):
            q = q.join('vm_container', ClusterResource, Cluster)
            q = q.filter_by(metacluster=dbcluster)
        else:
            q = q.join('vm_container', ClusterResource)
            q = q.filter_by(cluster=dbcluster)
        q = q.reset_joinpoint()
    if share:
        v2shares = session.query(Share.id).filter_by(name=share)
        if not v2shares.count():
            raise NotFoundException("No shares found with name {0}."
                                    .format(share))

        NasAlias = aliased(VirtualNasDisk)
        q = q.join('disks', (NasAlias, NasAlias.id == Disk.id))
        q = q.filter(NasAlias.share_id.in_(v2shares.subquery()))
        q = q.reset_joinpoint()

    if fullinfo or style != "raw":
        q = q.options(joinedload('location'),
                      subqueryload('interfaces'),
                      lazyload('interfaces.hardware_entity'),
                      joinedload('interfaces.assignments'),
                      joinedload('interfaces.assignments.dns_records'),
                      joinedload('chassis_slot'),
                      subqueryload('chassis_slot.chassis'),
                      subqueryload('disks'),
                      subqueryload('host'),
                      lazyload('host.hardware_entity'),
                      subqueryload('host.services_used'),
                      subqueryload('host._cluster'),
                      lazyload('host._cluster.host'))
        return q.all()
    return StringAttributeList(q.all(), "label")

def render(self, session, **arguments):
    q = session.query(DnsDomain)

    q = q.options(undefer('comments'),
                  subqueryload('dns_maps'),
                  lazyload('dns_maps.dns_domain'),
                  subqueryload('_ns_records'),
                  lazyload('_ns_records.dns_domain'),
                  joinedload('_ns_records.a_record.fqdn'),
                  joinedload('_ns_records.a_record.fqdn.dns_domain'))

    return q.all()

def edit_subscription_route():
    form = SubscriptionForm()
    if form.validate_on_submit():
        subscription = Subscription.query \
            .filter(Subscription.id == request.form.get('subscriptionid')) \
            .options(lazyload('user')) \
            .options(lazyload('event')).one()
        subscription.comment = request.form.get('comment')
        subscription.commitment = request.form.get('commitment')
        db.session.commit()
        flash('Subscription updated')
        return redirect(url_for('event_route', id=subscription.event_id))

def minister_questions(minister_id):
    """ Questions asked to a minister """
    # don't eager load duplicate committee details
    query = CommitteeQuestion.list()\
        .filter(CommitteeQuestion.minister_id == minister_id)\
        .order_by(CommitteeQuestion.date.desc())\
        .options(
            lazyload('committee'),
            lazyload('minister'),
            joinedload('asked_by_member'),
            lazyload('asked_by_member.memberships'))

    return api_resource_list(query)

def test_load_only_using_load_arg(self):
    item = (self.db.query(Foo)
            .load_only(orm.lazyload(Foo.bars), '_id', 'string')
            .first().bars[0].__dict__)

    self.assertIn('string', item)
    self.assertNotIn('number', item)
    self.assertNotIn('boolean', item)

def random_post(v):
    x = g.db.query(Submission).options(lazyload('board')).filter_by(
        is_banned=False,
    ).filter(Submission.deleted_utc == 0)

    now = int(time.time())
    cutoff = now - (60 * 60 * 24 * 180)
    x = x.filter(Submission.created_utc >= cutoff)

    if not (v and v.over_18):
        x = x.filter_by(over_18=False)

    if not (v and v.show_nsfl):
        x = x.filter_by(is_nsfl=False)

    if v and v.hide_offensive:
        x = x.filter_by(is_offensive=False)

    if v and v.hide_bot:
        x = x.filter_by(is_bot=False)

    if v:
        bans = g.db.query(
            BanRelationship.board_id).filter_by(user_id=v.id).subquery()
        x = x.filter(Submission.board_id.notin_(bans))

    x = x.join(Submission.board).filter(Board.is_banned == False)

    total = x.count()
    n = random.randint(0, total - 1)

    post = x.order_by(Submission.id.asc()).offset(n).limit(1).first()
    return redirect(post.permalink)

def upgrade(pyramid_env):
    with context.begin_transaction():
        op.create_table(
            'text_fragment_identifier',
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('extract_id', sa.Integer, sa.ForeignKey('extract.id')),
            sa.Column('xpath_start', sa.String),
            sa.Column('offset_start', sa.Integer),
            sa.Column('xpath_end', sa.String),
            sa.Column('offset_end', sa.Integer))
        op.add_column('extract',
                      sa.Column('annotation_text', sa.UnicodeText))

    # Do stuff with the app's models here.
    from assembl.models import Extract
    db = Extract.db()
    with transaction.manager:
        q = db.execute('''
            SELECT extract.id, email.subject, email.body, post.id
            FROM extract
            JOIN email ON (email.id = extract.source_id)
            JOIN content ON (email.id = content.id)
            JOIN post ON (post.content_id = email.id)
            WHERE content.type = 'email'
        ''')
        vals = {ex_id: (sub, body, postid)
                for (ex_id, sub, body, postid) in q}
        for extract in db.query(Extract).options(lazyload('*')).all():
            v = vals.get(extract.id)
            if v:
                tfi = extract._infer_text_fragment_inner(*v)
                if tfi:
                    db.add(tfi)

def get(self):
    session = self.db_session

    # query demo
    user_obj = session.query(User) \
        .options(defer(User.email),
                 lazyload("address_obj_s").load_only("name")).first()
    user_name = self.login_user.email if self.login_user else user_obj.name

    if user_obj:
        if self.login_user:
            self.render_template(DASHBOARD_DIR_NAME, 'page_user_dashboard.html',
                                 name=user_name,
                                 address_obj_s=user_obj.address_obj_s)
        else:
            self.render_template(DASHBOARD_DIR_NAME, 'page_dashboard.html',
                                 name=user_name,
                                 address_obj_s=user_obj.address_obj_s)
    else:
        obj = User(name="jiang", email="*****@*****.**")
        session.add(obj)
        session.flush()
        address_1 = Address(name="address name 1", address="Rd 1",
                            user_id=obj.id)
        address_2 = Address(name="address name 2", address="Rd 2",
                            user_id=obj.id)
        session.add(address_1)
        session.add(address_2)
        session.commit()
        self.render_template(DASHBOARD_DIR_NAME, 'page_dashboard.html',
                             name=user_name,
                             address_obj_s=obj.address_obj_s)

def get_top_pipelines(self, db, metric, limit=None):
    metric_name = metric.name

    pipeline = aliased(database.Pipeline)
    crossval_score = (
        select([func.avg(database.CrossValidationScore.value)])
        .where(database.CrossValidationScore.cross_validation_id ==
               database.CrossValidation.id)
        .where(database.CrossValidationScore.metric == metric_name)
        .where(database.CrossValidation.pipeline_id == pipeline.id)
        .as_scalar()
    )
    if metric.best_value() == 1:
        crossval_score_order = crossval_score.desc()
    else:
        crossval_score_order = crossval_score.asc()  # Error based metrics
    q = (
        db.query(pipeline, crossval_score)
        .filter(pipeline.id.in_(self.pipelines))
        .filter(crossval_score != None)
        # FIXME: Using a joined load here results in duplicated results
        .options(lazyload(pipeline.parameters))
        .order_by(crossval_score_order)
    )
    if limit is not None:
        q = q.limit(limit)
    return q.all()

def saved_idlist(self, page=1):
    posts = g.db.query(Submission.id).options(lazyload('*')).filter_by(
        is_banned=False,
        deleted_utc=0)

    if not self.over_18:
        posts = posts.filter_by(over_18=False)

    saved = g.db.query(SaveRelationship.submission_id).filter(
        SaveRelationship.user_id == self.id).subquery()
    posts = posts.filter(Submission.id.in_(saved))

    if self.admin_level < 4:  # admins can see everything
        m = g.db.query(ModRelationship.board_id).filter_by(
            user_id=self.id, invite_rescinded=False).subquery()
        c = g.db.query(ContributorRelationship.board_id).filter_by(
            user_id=self.id).subquery()
        posts = posts.filter(
            or_(Submission.author_id == self.id,
                Submission.post_public == True,
                Submission.board_id.in_(m),
                Submission.board_id.in_(c)))

    blocking = g.db.query(
        UserBlock.target_id).filter_by(user_id=self.id).subquery()
    blocked = g.db.query(
        UserBlock.user_id).filter_by(target_id=self.id).subquery()

    posts = posts.filter(Submission.author_id.notin_(blocking),
                         Submission.author_id.notin_(blocked))

    posts = posts.order_by(Submission.created_utc.desc())

    return [x[0] for x in posts.offset(25 * (page - 1)).limit(26).all()]

def _get_resource(self, context, model, id, for_update=False):
    resource = None
    try:
        if for_update:
            # To lock the instance for update, return a single
            # instance, instead of an instance with LEFT OUTER
            # JOINs that do not work in PostgreSQL
            query = self._model_query(context, model).options(
                lazyload('*')
            ).filter(
                model.id == id).with_lockmode('update')
            resource = query.one()
        else:
            resource = self._get_by_id(context, model, id)
    except exc.NoResultFound:
        with excutils.save_and_reraise_exception(reraise=False) as ctx:
            if issubclass(model, (models.LoadBalancer, models.Listener,
                                  models.L7Policy, models.L7Rule,
                                  models.PoolV2, models.MemberV2,
                                  models.HealthMonitorV2,
                                  models.LoadBalancerStatistics,
                                  models.SessionPersistenceV2)):
                raise loadbalancerv2.EntityNotFound(name=model.NAME, id=id)
            ctx.reraise = True
    return resource

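# The comment in _get_resource reflects a general PostgreSQL rule worth a
# sketch: SELECT ... FOR UPDATE cannot lock the nullable side of a LEFT
# OUTER JOIN, so configured joined-eager loads must be switched off before
# locking. The models below are invented for illustration, and
# with_for_update() is the newer spelling of the with_lockmode('update')
# call used above; assumes SQLAlchemy 1.4.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, lazyload, relationship

Base = declarative_base()

class LoadBalancer(Base):
    __tablename__ = 'loadbalancers'
    id = Column(Integer, primary_key=True)
    # A joined-eager relationship like this is what would force the LEFT
    # OUTER JOIN that breaks FOR UPDATE on PostgreSQL.
    listeners = relationship('Listener', lazy='joined')

class Listener(Base):
    __tablename__ = 'listeners'
    id = Column(Integer, primary_key=True)
    loadbalancer_id = Column(Integer, ForeignKey('loadbalancers.id'))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)

# lazyload('*') overrides the lazy='joined' default, so the emitted SQL is
# a plain single-table SELECT that FOR UPDATE can lock.
locked = (session.query(LoadBalancer)
          .options(lazyload('*'))
          .filter(LoadBalancer.id == 1)
          .with_for_update()
          .one_or_none())
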
def delete(id):
    """
    Delete one order by id.

    Method: *DELETE*
    URI: */orders/id*
    """
    # Query
    query = db.session.query(Order).options(lazyload('*'))
    order = query.get(id)

    # Check that the order exists
    if order is None:
        return make_response(gettext(u"La commande n'existe pas."), 404)

    # Delete the order
    db.session.delete(order)

    # Commit
    try:
        db.session.commit()
    except Exception:  # pragma: no cover
        db.session.rollback()
        return make_response(
            gettext(u"Dûe a une erreur inconnu, la commande ne peut pas "
                    u"être supprimée."), 500)

    # Build the response
    response = make_response(jsonify(order.to_dict(lines_order=False,
                                                   state=False)))
    response.status_code = 200
    response.mimetype = 'application/json'
    return response

def index():
    #SELECT pages.id, pages.name, versions.page_id, max_rev, versions.title
    #FROM (
    #    SELECT page_id, max(rev) as max_rev
    #    FROM versions
    #    GROUP BY page_id
    #    ORDER BY max_rev DESC
    #) s
    #INNER JOIN pages ON s.page_id = pages.id
    #INNER JOIN versions ON s.max_rev = versions.rev;

    #max_rev_query = (session.query(Version.page_id,
    #                               func.max(Version.rev).label("max_rev"))
    #    .group_by(Version.page_id)
    #    .order_by(sql.desc("max_rev"))
    #    .subquery()
    #)
    #
    #pages = session.query(Page, Version.title).join(Version.page).join(
    #    (max_rev_query, max_rev_query.c.max_rev == Version.rev)
    #)

    #pages = Page.query.options(undefer_group("text"))
    pages = Page.query.options(lazyload("tags")).order_by(sql.desc("max_rev"))
    tags = Tag.all()
    return render_template("index.html", tags=tags, pages=pages.all())

def getUserGroups(login_id, groups):
    """Get the groups for a user:
    a) the groups defined by his user_group_memberships
    b) the users who have him assigned as a delegation
    c) the groups of the delegation user.
    """
    if login_id not in groups:
        groups.append(login_id)
    session = Session()
    db_user = session.query(domain.User).filter(
        domain.User.login == login_id).all()
    if len(db_user) == 1:
        user_id = db_user[0].user_id
        query = (
            session.query(domain.GroupMembership)
            .filter(rdb.and_(domain.GroupMembership.user_id == user_id,
                             domain.GroupMembership.active_p == True))
            .options(eagerload("group"), lazyload("user"))
        )
        results = query.all()
        for result in results:
            if result.group.group_principal_id not in groups:
                groups.append(result.group.group_principal_id)
        results = delegation.get_user_delegations(user_id)
        for result in results:
            if result.login not in groups:
                groups = groups + getUserGroups(result.login, groups)
    return groups

def render(self, session, **arguments):
    q = session.query(NetworkDevice)

    q = q.options(subqueryload('location'),
                  subqueryload('interfaces'),
                  lazyload('interfaces.hardware_entity'),
                  joinedload('interfaces.assignments'),
                  joinedload('interfaces.assignments.dns_records'),
                  joinedload('interfaces.assignments.network'),
                  subqueryload('observed_macs'),
                  undefer('observed_macs.creation_date'),
                  subqueryload('observed_vlans'),
                  undefer('observed_vlans.creation_date'),
                  joinedload('observed_vlans.network'),
                  subqueryload('model'),
                  # Switches don't have machine specs, but the formatter
                  # checks for their existence anyway
                  joinedload('model.machine_specs'))

    # Prefer the primary name for ordering
    q = q.outerjoin(DnsRecord,
                    (Fqdn, DnsRecord.fqdn_id == Fqdn.id),
                    DnsDomain)
    q = q.options(contains_eager('primary_name'),
                  contains_eager('primary_name.fqdn'),
                  contains_eager('primary_name.fqdn.dns_domain'))
    q = q.reset_joinpoint()

    q = q.order_by(Fqdn.name, DnsDomain.name, NetworkDevice.label)

    return q.all()

def find_document(path, compareto):
    query = session.query(Document)
    first = True
    for i, match in enumerate(
        re.finditer(r"/([\w_]+)(?:\[@([\w_]+)(?:=(.*))?\])?", path)
    ):
        (token, attrname, attrvalue) = match.group(1, 2, 3)
        if first:
            query = query.join("_nodes", aliased=True).filter(
                _Node.parent_id == None
            )
            first = False
        else:
            query = query.join("children", aliased=True, from_joinpoint=True)
        query = query.filter(_Node.tag == token)
        if attrname:
            query = query.join("attributes", aliased=True,
                               from_joinpoint=True)
            if attrvalue:
                query = query.filter(
                    and_(
                        _Attribute.name == attrname,
                        _Attribute.value == attrvalue,
                    )
                )
            else:
                query = query.filter(_Attribute.name == attrname)
    return (
        query.options(lazyload("_nodes")).filter(_Node.text == compareto).all()
    )

def test_instance_lazy_relation_loaders(self):
    users, addresses = (self.tables.users,
                        self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='noload')
    })
    mapper(Address, addresses)

    sess = Session()
    u1 = User(name='ed', addresses=[
        Address(
            email_address='*****@*****.**',
        )
    ])

    sess.add(u1)
    sess.commit()
    sess.close()

    u1 = sess.query(User).options(
        lazyload(User.addresses)
    ).first()

    u2 = pickle.loads(pickle.dumps(u1))

    sess = Session()
    sess.add(u2)
    assert u2.addresses

def test_invocation_per_mapper(self):
    """test that BakedLazyLoader is getting invoked with the
    "baked_select" lazy setting.

    """
    User, Address = self._o2m_fixture(lazy="baked_select")

    sess = Session()
    q = sess.query(User).options(lazyload(User.addresses))
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # not invoked
        eq_(el.mock_calls, [])

    sess = Session()
    q = sess.query(User)
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # invoked
        is_(
            el.mock_calls[0][1][1],
            u1._sa_instance_state
        )

def _job_get_next_by_action(session, now, action):
    # Round off 'now' to minute precision to allow the SQL query cache to
    # do more work

    # Testing showed that lazyload is apparently fastest in our specific
    # case since we only fetch a single job here and there's only one
    # child table, hence only two simple queries vs. subqueryload which
    # issues a second more complex query, or joinedload which issues
    # a single more complex join
    now_round_off = now.replace(second=0, microsecond=0)
    statuses = ['DONE', 'CANCELLED', 'HARD_TIMED_OUT', 'MAX_RETRIED']
    job_ref = session.query(models.Job)\
        .options(sa_orm.lazyload('job_metadata'))\
        .filter_by(action=action)\
        .filter(~models.Job.status.in_(statuses))\
        .filter(sa_sql.or_(models.Job.worker_id.is_(None),
                           models.Job.timeout <= now_round_off))\
        .order_by(models.Job.updated_at.asc())\
        .first()

    # Force loading of the job_metadata
    if job_ref is not None:
        m = job_ref['job_metadata']
        LOG.info(_('Job metadata forcefully loaded: %s' % m))
    return job_ref

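# The reasoning in the comments above (two trivial queries vs. one complex
# one) can be observed directly by echoing the SQL each loader strategy
# emits. Job/JobMetadata here are toy stand-ins invented for this sketch,
# not the models used above; assumes SQLAlchemy 1.4.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import (Session, declarative_base, joinedload, lazyload,
                            relationship, subqueryload)

Base = declarative_base()

class Job(Base):
    __tablename__ = 'jobs'
    id = Column(Integer, primary_key=True)
    action = Column(String)
    job_metadata = relationship('JobMetadata')

class JobMetadata(Base):
    __tablename__ = 'job_metadata'
    id = Column(Integer, primary_key=True)
    job_id = Column(Integer, ForeignKey('jobs.id'))

engine = create_engine('sqlite://', echo=True)  # echo=True prints the SQL
Base.metadata.create_all(engine)
session = Session(engine)
session.add(Job(action='snapshot', job_metadata=[JobMetadata()]))
session.commit()

# lazyload: one trivial SELECT now, a second trivial SELECT when
# job_metadata is first touched -- two simple queries in total.
job = session.query(Job).options(lazyload(Job.job_metadata)).first()
job.job_metadata

# joinedload: a single but wider query with a LEFT OUTER JOIN.
session.expunge_all()
job = session.query(Job).options(joinedload(Job.job_metadata)).first()

# subqueryload: a second, more complex query that wraps the first one
# as a subquery.
session.expunge_all()
job = session.query(Job).options(subqueryload(Job.job_metadata)).first()
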
def find_document(path, compareto):
    query = session.query(Document)
    attribute = Document._root
    for i, match in enumerate(
            re.finditer(r"/([\w_]+)(?:\[@([\w_]+)(?:=(.*))?\])?", path)):
        (token, attrname, attrvalue) = match.group(1, 2, 3)
        target_node = aliased(_Node)
        query = query.join(
            attribute.of_type(target_node)).filter(target_node.tag == token)
        attribute = target_node.children
        if attrname:
            attribute_entity = aliased(_Attribute)
            if attrvalue:
                query = query.join(
                    target_node.attributes.of_type(attribute_entity)).filter(
                        and_(
                            attribute_entity.name == attrname,
                            attribute_entity.value == attrvalue,
                        ))
            else:
                query = query.join(
                    target_node.attributes.of_type(attribute_entity)).filter(
                        attribute_entity.name == attrname)
    return (query.options(lazyload(
        Document._root)).filter(target_node.text == compareto).all())

def destroy(session, data, model_class=None, synchronize_session=False):
    """
    Delete bulk `data`.

    The `data` argument can be any of the following:

    - Single instance of `model_class`
    - List of `model_class` instances
    - Primary key value (single value or ``tuple`` of values for
      composite keys)
    - List of primary key values.
    - Dict containing primary key(s) mapping
    - List of dicts with primary key(s) mappings

    If non-`model_class` instances are passed in, then `model_class` is
    required to know which table to delete from.

    Args:
        session (Session): SQLAlchemy session object.
        data (mixed): Data to delete from database.
        synchronize_session (bool|str): Argument passed to ``Query.delete``.

    Returns:
        int: Number of deleted records.
    """
    if not is_sequence(data) or isinstance(data, tuple):
        data = [data]

    valid_model_class = isinstance(model_class, DeclarativeMeta)

    mapped_data = defaultdict(list)

    for idx, item in enumerate(data):
        item_class = item.__class__

        if not isinstance(item_class, DeclarativeMeta) and valid_model_class:
            class_ = model_class
        else:
            class_ = item_class

        if not isinstance(class_, DeclarativeMeta):
            raise TypeError(
                f"Type of value given to destroy() function is not a valid"
                f" SQLALchemy declarative class and/or model class argument"
                f" is not valid. Item with index {idx} and with value"
                f" {item!r} is an instance of {item_class} and model class"
                f" is {model_class}."
            )

        mapped_data[class_].append(item)

    delete_count = 0

    with transaction(session):
        for model_class, data in mapped_data.items():
            count = (session.query(model_class)
                     .filter(primary_key_filter(data, model_class))
                     .options(orm.lazyload("*"))
                     .delete(synchronize_session=synchronize_session))
            delete_count += count

    return delete_count

def proje_notlari(proje_id):
    """
    Displays the private notes added to a project.

    Args:
        proje_id(int): id of the project

    Returns:
    """
    proje = DB.session.query(Proje).options(
        joinedload(Proje.proje_yurutucu).load_only("id").joinedload(
            OgretimElemani.personel).load_only("id").joinedload(
                Personel.person).load_only("ad", "soyad"),
        lazyload(Proje.proje_detayi),
        lazyload(Proje.kabul_edilen_proje_hakemleri),
        lazyload(Proje.proje_hakem_onerileri),
        lazyload(Proje.proje_destekleyen_kurulus),
        lazyload(Proje.proje_kalemleri),
    ).filter(
        Proje.id == proje_id,
        or_(
            Proje.proje_basvuru_durumu == ProjeBasvuruDurumu.tamamlandi,
            Proje.proje_basvuru_durumu == ProjeBasvuruDurumu.revizyon_bekleniyor
        )).first()
    next_states_info = get_next_states_info(proje_id=proje_id)
    actions_info = get_actions_info(proje_id=proje_id)
    proje_yurutucusu_mu = ProjeYurutucusu().fulfill(user=current_user)
    proje_notlari = DB.session.query(
        ProjeNot,
        Person.ad.label("ad"),
        Person.soyad.label("soyad")).filter(
            ProjeNot.proje_id == proje_id).join(
                Person, Person.user_id == ProjeNot.notu_ekleyen_yetkili).all()
    if not proje:
        pass  # TODO: return a "project not found" error

    return render_template("dashboard/proje_notlari.html",
                           proje=proje,
                           next_states_info=next_states_info,
                           proje_yurutucusu_mu=proje_yurutucusu_mu,
                           actions_info=actions_info,
                           proje_notlari=proje_notlari,
                           proje_id=proje.id)

def eval_ast(ast, session):
    assert isinstance(ast, RootNode), \
        'Must start evaluation on RootNode!'
    root_alias = aliased(ast.model)
    query = session.query(root_alias).options(lazyload('*'))
    for c in ast.children:
        query = _eval_ast(c, query, root_alias, tuple())
    return query.all()

def get_jobs(self, volumepattern):
    log.debug("Calling with volumepattern[%s]", volumepattern)
    volumes = self.session.query(Media).options(lazyload('*')).filter(
        Media.volumename.like(volumepattern))
    # Collect the distinct job ids across all matching volumes
    jobset = set()
    for volume in volumes:
        for job in volume.jobs:
            jobset.add(job.jobid)
    return list(jobset)

def populate_initiative_map(db: Session = Depends(get_db)) -> Dict:
    initiatives = db.query(Initiative).options(
        lazyload(Initiative.roles_rel)).all()
    result = {}
    if initiatives is not None:
        for item in initiatives:
            result[item] = False
    return result

def build_initial_query(self, trans, **kwargs):
    # See optimization description comments and TODO for tags in matching
    # public histories query. In addition to that - be sure to lazyload
    # the latest_workflow - it isn't needed and it causes all of its steps
    # to be eagerly loaded.
    return trans.sa_session.query(self.model_class).join("user").options(
        lazyload("latest_workflow"),
        eagerload("user").load_only("username"),
        eagerload("annotations"),
        undefer("average_rating"))

def get_volumes(self, volumepattern, empty=False):
    log.debug("Calling with volumepattern[%s]", volumepattern)
    volumes = self.session.query(Media).options(lazyload('*')).filter(
        Media.volumename.like(volumepattern))
    if empty:
        volumes = volumes.filter(~Media.jobs.any())
    return volumes

def test_state_noload_to_lazy(self):
    """Behavioral test to verify the current activity of
    loader callables."""

    users, Address, addresses, User = (
        self.tables.users,
        self.classes.Address,
        self.tables.addresses,
        self.classes.User)

    mapper(
        User, users,
        properties={'addresses': relationship(Address, lazy='noload')})
    mapper(Address, addresses)

    sess = create_session()
    u1 = sess.query(User).options(lazyload(User.addresses)).first()
    assert isinstance(
        attributes.instance_state(u1).callables['addresses'],
        strategies.LoadLazyAttribute
    )

    # expire, it stays
    sess.expire(u1)
    assert 'addresses' not in \
        attributes.instance_state(u1).expired_attributes
    assert isinstance(
        attributes.instance_state(u1).callables['addresses'],
        strategies.LoadLazyAttribute
    )

    # load over it.  callable goes away.
    sess.query(User).first()
    assert 'addresses' not in \
        attributes.instance_state(u1).expired_attributes
    assert 'addresses' not in attributes.instance_state(u1).callables

    sess.expunge_all()
    u1 = sess.query(User).options(lazyload(User.addresses)).first()
    sess.expire(u1, ['addresses'])
    assert 'addresses' not in \
        attributes.instance_state(u1).expired_attributes
    assert isinstance(
        attributes.instance_state(u1).callables['addresses'],
        strategies.LoadLazyAttribute
    )

    # load the attr, goes away
    u1.addresses
    assert 'addresses' not in \
        attributes.instance_state(u1).expired_attributes
    assert 'addresses' not in attributes.instance_state(u1).callables

def _test_baked_lazy_loading(self, set_option):
    User, Address = self.classes.User, self.classes.Address

    base_bq = self.bakery(
        lambda s: s.query(User))

    if set_option:
        base_bq += lambda q: q.options(lazyload(User.addresses))

    base_bq += lambda q: q.order_by(User.id)

    assert_result = self.static.user_address_result

    for i in range(4):
        for cond1, cond2 in itertools.product(
                *[(False, True) for j in range(2)]):

            bq = base_bq._clone()

            sess = Session()

            if cond1:
                bq += lambda q: q.filter(User.name == 'jack')
            else:
                bq += lambda q: q.filter(User.name.like('%ed%'))

            if cond2:
                ct = func.count(Address.id).label('count')
                subq = sess.query(
                    ct,
                    Address.user_id).group_by(Address.user_id).\
                    having(ct > 2).subquery()

                bq += lambda q: q.join(subq)

            if cond2:
                if cond1:
                    def go():
                        result = bq(sess).all()
                        eq_([], result)
                    self.assert_sql_count(testing.db, go, 1)
                else:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[1:2], result)
                    self.assert_sql_count(testing.db, go, 2)
            else:
                if cond1:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[0:1], result)
                    self.assert_sql_count(testing.db, go, 2)
                else:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[1:3], result)
                    self.assert_sql_count(testing.db, go, 3)

            sess.close()

def get_fork_list():
    fork_list = ['https://github.com/searx/searx']
    with get_engine().connect() as connection:
        with new_session(bind=connection) as session:
            for fork in session.query(Fork).options(lazyload(Fork.commits)):
                git_url = fork.git_url
                if git_url not in fork_list:
                    fork_list.append(git_url)
    return fork_list

def _match_uuid(self, uuid: str,
                load_query=None) -> Union[EvenementEntity, None]:
    query = self.session.query(EvenementEntity).filter(
        EvenementEntity.uuid == uuid).options(lazyload("*"))
    matches = query.all()
    if matches:
        return matches[0]

def committee_meeting_attendance(committee_meeting_id):
    """ MP attendance of committee meetings. """
    query = CommitteeMeetingAttendance.list()\
        .filter(CommitteeMeetingAttendance.meeting_id == committee_meeting_id)\
        .options(lazyload('member.memberships'))

    return api_resource_list(query)

def render_gundem_sablonu():
    """
    There are two different uses.

    When changing the agenda type of an existing agenda item, the relevant
    template is rendered from the agenda id and agenda type data (the id of
    the related project is found from the agenda record), and the decision
    (karar) and explanation (aciklama) text is returned.

    When creating an agenda item for a particular project, it can be used
    with the project id and agenda type data.
    """
    try:
        gundem_id = request.get_json().get("gundem_id", None)
        sablon_tipi = request.get_json().get("sablon_tipi", None)
        proje_id = request.get_json().get("proje_id", None)
        gundem_sablonu = DB.session.query(
            GundemSablon.id.label("sablon_id"),
            GundemSablon.karar.label("karar"),
            GundemSablon.aciklama.label("aciklama"),
        ).filter(
            GundemSablon.sablon_tipi == sablon_tipi
        ).order_by(desc(GundemSablon.updated_at)).first()

        if gundem_id:
            gundem = DB.session.query(
                BapGundem.aciklama.label("aciklama"),
                BapGundem.karar.label("karar"),
                BapGundem.proje_id.label("proje_id")
            ).options(
                lazyload("*")
            ).filter(BapGundem.id == gundem_id).first()
            proje_id = gundem.proje_id

        if not gundem_sablonu:
            return jsonify(status="error"), 500

        gundem_proje_data_query = BapQueryHelpers.get_gundem_sablon_proje_data_query()
        proje_gundem_sablon_data = gundem_proje_data_query.filter(
            Proje.id == proje_id
        ).first()

        karar_text = render_template_string(gundem_sablonu.karar,
                                            proje=proje_gundem_sablon_data)
        aciklama_text = render_template_string(gundem_sablonu.aciklama,
                                               proje=proje_gundem_sablon_data)
        data = {
            "karar": karar_text,
            "aciklama": aciklama_text,
            "sablon_id": gundem_sablonu.sablon_id
        }
        return jsonify(status="success", data=data)
    except Exception as exc:
        CustomErrorHandler.error_handler(
            hata="Gündemi tipine göre şablon metni render "
                 "edilirken bir hata meydana geldi. "
                 "Hata: {}".format(exc))
        return jsonify(status="error"), 500

def generate_search_query(search_string, filters, order, sort_by_arg,
                          include_user=False):
    """
    Generates a query to search transfer accounts by their users'
    parameters. This is used by search_api, as well as the bulk
    disbursement API.

    :param search_string: The search query string
    :param filters: A SQLAlchemy filter object to apply to the query
    :param order: The order in which to display results. Use
        sqlalchemy.asc or sqlalchemy.desc
    :param sort_by_arg: Boolean. True returns original phone
    """
    sort_types_to_database_types = {
        'first_name': User.first_name,
        'last_name': User.last_name,
        'email': User.email,
        'date_account_created': User.created,
        'rank': 'rank',
        'balance': TransferAccount._balance_wei,
        'status': TransferAccount.is_approved,
    }

    if sort_by_arg not in sort_types_to_database_types:
        return {
            'message': f'Invalid sort_by value {sort_by_arg}. Please use one of '
                       f'the following: {sort_types_to_database_types.keys()}'
        }

    # To add a new searchable column, simply add a new SearchableColumn
    # object! And don't forget to add a trigram index on that column too --
    # see migration 33df5e72fca4 for reference
    user_search_columns = [
        SearchableColumn('first_name', User.first_name, rank=1.5),
        SearchableColumn('last_name', User.last_name, rank=1.5),
        SearchableColumn('phone', User.phone, rank=2),
        SearchableColumn('public_serial_number', User.public_serial_number, rank=2),
        SearchableColumn('location', User.location, rank=1),
        SearchableColumn('primary_blockchain_address', User.primary_blockchain_address, rank=2),
    ]

    sum_search = reduce(lambda x, y: x + y,
                        [sc.get_similarity_query(search_string)
                         for sc in user_search_columns])
    sort_by = (sum_search if sort_by_arg == 'rank'
               else sort_types_to_database_types[sort_by_arg])
    # If there's no search string, the process is the same, just sort by
    # account creation date
    sort_by = (sort_types_to_database_types['date_account_created']
               if sort_by == 'rank' and not search_string else sort_by)

    entities = ([TransferAccount, sum_search, User] if include_user
                else [TransferAccount])
    final_query = db.session.query(TransferAccount, User, sum_search)\
        .outerjoin(TransferAccount,
                   User.default_transfer_account_id == TransferAccount.id)\
        .filter(TransferAccount.is_ghost != True)\
        .with_entities(*entities)\
        .order_by(order(sort_by))

    # TODO: work out the difference between the above and
    # final_query = db.session.query(TransferAccount, User) \
    #     .outerjoin(TransferAccount,
    #                User.default_transfer_account_id == TransferAccount.id) \
    #     .with_entities(TransferAccount) \
    #     .order_by(order(sort_by))

    # Joining custom attributes is quite expensive, and we don't need them
    # in a listing of search results
    if include_user:
        final_query = final_query.options(lazyload(User.custom_attributes))

    # If there is a search string, we only want to return ranked results!
    final_query = (final_query.filter(sum_search != 0) if search_string
                   else final_query)

    return apply_filters(final_query, filters, User)

def _get_experiment_sqa_immutable_opt_config_and_search_space(
    experiment_name: str, exp_sqa_class: Type[SQAExperiment]
) -> SQAExperiment:
    """For experiments where the search space and opt config are
    immutable, we don't store copies of search space and opt config
    on each generator run. Therefore, there's no need to try to load
    these copies from the DB -- these queries will always return an
    empty list, and are therefore unnecessary and wasteful.
    """
    return _get_experiment_sqa(
        experiment_name=experiment_name,
        exp_sqa_class=exp_sqa_class,
        query_options=[
            lazyload("trials.generator_runs.parameters"),
            lazyload("trials.generator_runs.parameter_constraints"),
            lazyload("trials.generator_runs.metrics"),
        ],
    )

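# Since many of these examples rely on dotted-path strings, it may help to
# spell out what a path like "trials.generator_runs.parameters" means: each
# segment names a relationship hop, and the loader of the final attribute
# is the one overridden. A sketch with invented models (not Ax's actual
# schema); the chained attribute form shown for q2 is equivalent.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, lazyload, relationship

Base = declarative_base()

class Experiment(Base):
    __tablename__ = 'experiments'
    id = Column(Integer, primary_key=True)
    trials = relationship('Trial')

class Trial(Base):
    __tablename__ = 'trials'
    id = Column(Integer, primary_key=True)
    experiment_id = Column(Integer, ForeignKey('experiments.id'))
    generator_runs = relationship('GeneratorRun')

class GeneratorRun(Base):
    __tablename__ = 'generator_runs'
    id = Column(Integer, primary_key=True)
    trial_id = Column(Integer, ForeignKey('trials.id'))
    parameters = relationship('Parameter', lazy='selectin')

class Parameter(Base):
    __tablename__ = 'parameters'
    id = Column(Integer, primary_key=True)
    generator_run_id = Column(Integer, ForeignKey('generator_runs.id'))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)

# String path: walk trials -> generator_runs, then force .parameters lazy.
q1 = session.query(Experiment).options(
    lazyload('trials.generator_runs.parameters'))

# Equivalent chained form using mapped attributes.
q2 = session.query(Experiment).options(
    lazyload(Experiment.trials)
    .lazyload(Trial.generator_runs)
    .lazyload(GeneratorRun.parameters))
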
def system_notif_count(self):
    return self.notifications.options(
        lazyload('*')
    ).join(
        Notification.comment
    ).filter(
        Notification.read == False,
        Comment.author_id == 1
    ).count()

def member_attendance(member_id):
    """ MP attendance of committee meetings. """
    query = CommitteeMeetingAttendance.list()\
        .filter(CommitteeMeetingAttendance.member_id == member_id)\
        .options(lazyload('member'), joinedload('meeting'))

    return api_resource_list(query)

def notification_commentlisting(self, page=1, all_=False, replies_only=False,
                                mentions_only=False, system_only=False):
    notifications = self.notifications.options(
        lazyload('*'),
        joinedload(Notification.comment).lazyload('*'),
        joinedload(Notification.comment).joinedload(
            Comment.comment_aux)).join(Notification.comment).filter(
        Comment.is_banned == False,
        Comment.deleted_utc == 0)

    if replies_only:
        cs = g.db.query(
            Comment.id).filter(Comment.author_id == self.id).subquery()
        ps = g.db.query(Submission.id).filter(
            Submission.author_id == self.id).subquery()
        notifications = notifications.filter(
            or_(
                Comment.parent_comment_id.in_(cs),
                and_(Comment.level == 1,
                     Comment.parent_submission.in_(ps))))
    elif mentions_only:
        cs = g.db.query(
            Comment.id).filter(Comment.author_id == self.id).subquery()
        ps = g.db.query(Submission.id).filter(
            Submission.author_id == self.id).subquery()
        notifications = notifications.filter(
            and_(
                Comment.parent_comment_id.notin_(cs),
                or_(Comment.level > 1,
                    Comment.parent_submission.notin_(ps))))
    elif system_only:
        notifications = notifications.filter(Comment.author_id == 1)
    elif not all_:
        notifications = notifications.filter(Notification.read == False)

    notifications = notifications.options(
        contains_eager(Notification.comment))

    notifications = notifications.order_by(Notification.id.desc()).offset(
        25 * (page - 1)).limit(26)

    output = []
    for x in notifications[0:25]:
        x.read = True
        g.db.add(x)
        output.append(x.comment_id)

    g.db.commit()
    return output

def post_hakem_proje_degerlendirme_istekleri(self, proje_hakem_id, proje_id):
    # pylint: disable=R0201
    """
    Used to accept or reject a referee (hakem) evaluation request.
    """
    proje_hakemi = DB.session.query(ProjeHakemleri).options(
        joinedload(ProjeHakemleri.proje).load_only(Proje.proje_no)).filter(
            ProjeHakemleri.id == proje_hakem_id,
            ProjeHakemleri.davet_durumu == ProjeHakemDavetDurumlari.gonderildi
        ).one()
    hakem = DB.session.query(Hakem.id.label("hakem_id")).options(
        lazyload('*')).filter(
            Hakem.person_id == current_user.person.id).one()
    if proje_hakemi:
        if proje_hakemi.hakem_id != hakem.hakem_id:
            return jsonify(status="error"), 500
        davet_durumu = request.get_json().get('davet_durumu')
        proje_hakemi.davet_durumu = davet_durumu
        DB.session.commit()
        if proje_hakemi.davet_durumu == ProjeHakemDavetDurumlari.kabul_edildi:
            message = "hakemlik davetini kabul etti"
        else:
            message = "hakemlik davetini reddetti"
        extra_message = """{} adlı kullanıcı({} id'li hakem),{} numaralı projenin {}""".format(
            current_user.username,
            hakem.hakem_id,
            proje_hakemi.proje.proje_no,
            message)
        signal_payload = {
            "message_type": USER_ACTIVITY_MESSAGES.get("bap").get(
                "degerlendirme_teklifi_cevaplandi").type_index,
            "nesne": 'Proje Hakemleri',
            "nesne_id": proje_hakemi.id,
            "ekstra_mesaj": extra_message
        }
        signal_sender(**signal_payload)
        for bap_admin in bap_yetkili_and_admin_ids():
            payload = {
                "notification_receiver": bap_admin.person_id,
                "notification_title": proje_hakemi.davet_durumu.value,
                "notification_message": "{} adlı kullanıcı {} numaralı projenin {}".format(
                    current_user.username,
                    proje_hakemi.proje.proje_no,
                    message),
            }
            signal_sender(log=False, notification=True, **payload)
        return jsonify(status="success")
    return abort(400)

def browse_guilds(v):
    page = int(request.args.get("page", 1))

    # prevent invalid paging
    page = max(page, 1)

    sort_method = request.args.get("sort", "trending")

    # get list of ids
    ids = guild_ids(
        sort=sort_method,
        page=page,
        nsfw=(v and v.over_18),
        cats=request.args.get("cats").split(',') if request.args.get("cats") else None
    )

    # check existence of next page
    next_exists = (len(ids) == 26)
    ids = ids[0:25]

    # check if ids exist
    if ids:
        # assemble list of tuples
        i = 1
        tups = []
        for x in ids:
            tups.append((x, i))
            i += 1

        # tuple string
        tups = str(tups).lstrip("[").rstrip("]")

        # hit db for entries
        boards = g.db.query(Board).options(
            lazyload('*')).filter(Board.id.in_(ids)).all()

        boards = sorted(boards, key=lambda x: ids.index(x.id))
    else:
        boards = []

    return {"html": lambda: render_template("boards.html",
                                            v=v,
                                            boards=boards,
                                            page=page,
                                            next_exists=next_exists,
                                            sort_method=sort_method
                                            ),
            "api": lambda: jsonify({"data": [board.json for board in boards]})
            }

def userpagelisting(self, v=None, page=1, sort="new"):
    submissions = g.db.query(Submission.id).options(
        lazyload('*')).filter_by(author_id=self.id)

    if not (v and v.over_18):
        submissions = submissions.filter_by(over_18=False)

    if v and v.hide_offensive:
        submissions = submissions.filter_by(is_offensive=False)

    if v and v.hide_bot:
        submissions = submissions.filter_by(is_bot=False)

    if not (v and (v.admin_level >= 3)):
        submissions = submissions.filter_by(deleted_utc=0)

    if not (v and (v.admin_level >= 3 or v.id == self.id)):
        submissions = submissions.filter_by(is_banned=False)

    if v and v.admin_level >= 4:
        pass
    elif v:
        m = g.db.query(ModRelationship.board_id).filter_by(
            user_id=v.id, invite_rescinded=False).subquery()
        c = g.db.query(ContributorRelationship.board_id).filter_by(
            user_id=v.id).subquery()
        submissions = submissions.filter(
            or_(
                Submission.author_id == v.id,
                Submission.post_public == True,
                Submission.board_id.in_(m),
                Submission.board_id.in_(c)
            )
        )
    else:
        submissions = submissions.filter(Submission.post_public == True)

    if sort == "hot":
        submissions = submissions.order_by(Submission.score_best.desc())
    elif sort == "new":
        submissions = submissions.order_by(Submission.created_utc.desc())
    elif sort == "old":
        submissions = submissions.order_by(Submission.created_utc.asc())
    elif sort == "disputed":
        submissions = submissions.order_by(Submission.score_disputed.desc())
    elif sort == "top":
        submissions = submissions.order_by(Submission.score_top.desc())
    elif sort == "activity":
        submissions = submissions.order_by(Submission.score_activity.desc())

    listing = [x[0] for x in submissions.offset(25 * (page - 1)).limit(26)]

    return listing

def _remove_node_from_cluster(self, server_id):
    changed_routes = {}
    server = self.session.query(Server).get(server_id)
    if server:
        server.set_route(RouteContainer(None, None, None))

        lost_routes = self.session.query(Route).filter_by(
            proxy_server_id=server.id).options(
            orm.lazyload(Route.destination), orm.lazyload(Route.gate),
            orm.lazyload(Route.proxy_server)).count()

        if lost_routes:
            changed_routes = self._loop.run_until_complete(
                self._async_refresh_route_table(
                    discover_new_neighbours=False,
                    check_current_neighbours=False))

        changed_routes.update({server: RouteContainer(None, None, None)})
    return changed_routes

def get_attendance_members(sphere):
    return Member.query\
        .options(joinedload('house'), lazyload('memberships'))\
        .join(Member.party)\
        .join(Member.house)\
        .filter(Party.name.in_(MAJOR_PARTIES))\
        .filter(House.sphere == sphere)\
        .all()

def make_query(self, model):
    cols = ['storage_backend', 'storage_file_id', 'md5']
    if model.add_file_date_column:
        cols.append('created_dt')
    opts = QUERY_OPTIONS.get(model)
    return (model.query
            .filter(model.storage_file_id.isnot(None),
                    model.storage_backend.in_(self.source_backend_names))
            .filter_by(**SPECIAL_FILTERS.get(model, {}))
            .options(*((opts,) if opts else ()))
            .options(lazyload('*'), load_only(*cols)))

def get_supplier_framework_info(supplier_id, framework_slug):
    supplier_framework = SupplierFramework.find_by_supplier_and_framework(
        supplier_id, framework_slug
    ).options(
        lazyload('*')
    ).first()
    if supplier_framework is None:
        abort(404)

    return single_result_response(
        "frameworkInterest",
        supplier_framework,
        serialize_kwargs={"with_users": True},
    ), 200

def get_groups(user_id):
    principal_ids = []
    session = Session()
    query = session.query(domain.GroupMember).filter(
        rdb.and_(domain.GroupMember.user_id == user_id,
                 domain.GroupMember.active_p == True)).options(
        eagerload("group"), lazyload("user"))
    for member in query:
        principal_ids.append(member.group.principal_name)
    return principal_ids
