def stocklinelist(request, info, session):
    regular = (
        session.query(StockLine)
        .order_by(StockLine.dept_id, StockLine.name)
        .filter(StockLine.linetype == "regular")
        .options(joinedload("stockonsale"))
        .options(joinedload("stockonsale.stocktype"))
        .all()
    )
    display = (
        session.query(StockLine)
        .filter(StockLine.linetype == "display")
        .order_by(StockLine.name)
        .options(joinedload("stockonsale"))
        .options(undefer("stockonsale.used"))
        .all()
    )
    continuous = (
        session.query(StockLine)
        .filter(StockLine.linetype == "continuous")
        .order_by(StockLine.name)
        .options(undefer("stocktype.remaining"))
        .all()
    )
    return ("stocklines.html",
            {"regular": regular,
             "display": display,
             "continuous": continuous})
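All of the examples in this collection pair sqlalchemy.orm.undefer() with columns mapped as deferred(). For context, here is a minimal runnable sketch of that pairing; the Note model and its names are illustrative assumptions, not taken from the code above.

# Hypothetical minimal model showing deferred()/undefer(); a deferred
# column is omitted from the initial SELECT and lazy-loaded on access,
# unless undeferred at query time.
from sqlalchemy import Column, Integer, String, Text, create_engine
from sqlalchemy.orm import Session, declarative_base, deferred, undefer

Base = declarative_base()

class Note(Base):
    __tablename__ = "notes"
    id = Column(Integer, primary_key=True)
    title = Column(String(100))
    body = deferred(Column(Text))  # excluded from the default SELECT

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Note(title="t", body="b"))
    session.commit()
    # Without undefer, accessing .body would emit a second SELECT per row.
    note = session.query(Note).options(undefer(Note.body)).first()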
def index(self, page=1, **kwargs): """List podcasts and podcast media. Our custom paginate decorator allows us to have fewer podcast episodes display on the first page than on the rest with the ``items_first_page`` param. See :class:`mediacore.lib.custompaginate.CustomPage`. :param page: Page number, defaults to 1. :type page: int :rtype: dict :returns: podcasts The :class:`~mediacore.model.podcasts.Podcast` instance episodes The list of :class:`~mediacore.model.media.Media` instances for this page. """ episodes = ( DBSession.query(Media) .filter(Media.podcast_id != None) .order_by(Media.publish_on.desc()) .options(orm.undefer("comment_count_published")) ) episodes = self._filter(episodes) podcasts = DBSession.query(Podcast).options(orm.undefer("published_media_count")).all() return dict(podcasts=podcasts, episodes=episodes)
def stocklinelist(request, info, session):
    regular = session\
        .query(StockLine)\
        .order_by(StockLine.dept_id, StockLine.name)\
        .filter(StockLine.linetype == "regular")\
        .options(joinedload("stockonsale"))\
        .options(joinedload("stockonsale.stocktype"))\
        .all()
    display = session\
        .query(StockLine)\
        .filter(StockLine.linetype == "display")\
        .order_by(StockLine.name)\
        .options(joinedload("stockonsale"))\
        .options(undefer("stockonsale.used"))\
        .all()
    continuous = session\
        .query(StockLine)\
        .filter(StockLine.linetype == "continuous")\
        .order_by(StockLine.name)\
        .options(undefer("stocktype.remaining"))\
        .all()
    return ('stocklines.html', {
        'nav': [("Stock lines", info.reverse("tillweb-stocklines"))],
        'regular': regular,
        'display': display,
        'continuous': continuous,
    })
def index(self, page=1, search=None, podcast_filter=None, **kwargs):
    """List media with pagination and filtering.

    :param page: Page number, defaults to 1.
    :type page: int
    :param search: Optional search term to filter by
    :type search: unicode or None
    :param podcast_filter: Optional podcast to filter by
    :type podcast_filter: int or None
    :rtype: dict
    :returns:
        media
            The list of :class:`~mediacore.model.media.Media` instances
            for this page.
        search
            The given search term, if any
        search_form
            The :class:`~mediacore.forms.admin.SearchForm` instance
        podcast_filter
            The given podcast ID to filter by, if any
        podcast_filter_title
            The podcast name for rendering if a ``podcast_filter`` was
            specified.
        podcast_filter_form
            The :class:`~mediacore.forms.media.PodcastFilterForm` instance.

    """
    media = DBSession.query(Media)\
        .filter(Media.status.excludes('trash'))\
        .options(orm.undefer('comment_count_published'))\
        .options(orm.undefer('comment_count_unreviewed'))\
        .order_by(Media.status.desc(),
                  Media.publish_on.desc(),
                  Media.modified_on.desc())

    if search is not None:
        like_search = '%' + search + '%'
        media = media.filter(sql.or_(
            Media.title.like(like_search),
            Media.description.like(like_search),
            Media.notes.like(like_search),
            Media.tags.any(Tag.name.like(like_search)),
        ))

    podcast_filter_title = podcast_filter
    if podcast_filter == 'Unfiled':
        media = media.filter(~Media.podcast.has())
    elif podcast_filter is not None and podcast_filter != 'All Media':
        media = media.filter(Media.podcast.has(Podcast.id == podcast_filter))
        podcast_filter_title = DBSession.query(Podcast.title).get(podcast_filter)
        podcast_filter = int(podcast_filter)

    return dict(
        media = media,
        podcast_filter = podcast_filter,
        podcast_filter_title = podcast_filter_title,
        podcast_filter_form = podcast_filter_form,
        search = search,
        search_form = search_form,
    )
def sessionfinder(request, info, session):
    if request.method == "POST" and "submit_find" in request.POST:
        form = SessionFinderForm(request.POST)
        if form.is_valid():
            s = session.query(Session).get(form.cleaned_data["session"])
            if s:
                return HttpResponseRedirect(info["base"] + s.tillweb_url)
            form.add_error(None, "This session does not exist.")
    else:
        form = SessionFinderForm()
    if request.method == "POST" and "submit_sheet" in request.POST:
        rangeform = SessionRangeForm(request.POST)
        if rangeform.is_valid():
            cd = rangeform.cleaned_data
            return spreadsheets.sessionrange(
                session,
                start=cd["startdate"],
                end=cd["enddate"],
                tillname=info["tillname"],
            )
    else:
        rangeform = SessionRangeForm()
    recent = (
        session.query(Session)
        .options(undefer("total"))
        .options(undefer("actual_total"))
        .order_by(desc(Session.id))[:30]
    )
    return ("sessions.html",
            {"recent": recent, "form": form, "rangeform": rangeform})
def _get_eagerloaded_query(self, *args, **kwargs):
    """Eager hostnames loading.

    This is too complex for get_joinedloads, so we override the
    method instead.
    """
    query = super(VulnerabilityView, self)._get_eagerloaded_query(
        *args, **kwargs)
    joinedloads = [
        joinedload(Vulnerability.host)
        .load_only(Host.id)  # Only hostnames are needed
        .joinedload(Host.hostnames),
        joinedload(Vulnerability.service)
        .joinedload(Service.host)
        .joinedload(Host.hostnames),
        joinedload(VulnerabilityWeb.service)
        .joinedload(Service.host)
        .joinedload(Host.hostnames),
        joinedload(VulnerabilityGeneric.update_user),
        undefer(VulnerabilityGeneric.creator_command_id),
        undefer(VulnerabilityGeneric.creator_command_tool),
        undefer(VulnerabilityGeneric.target_host_ip),
        undefer(VulnerabilityGeneric.target_host_os),
        joinedload(VulnerabilityGeneric.evidence),
        joinedload(VulnerabilityGeneric.tags),
    ]
    return query.options(
        selectin_polymorphic(
            VulnerabilityGeneric,
            [Vulnerability, VulnerabilityWeb]
        ),
        *joinedloads
    )
def _query(query):
    last_week = datetime.now() - timedelta(days=7)
    return query.options(undefer('post_count'), undefer('posted_at')). \
        filter(or_(Topic.status == "open",
                   and_(Topic.status != "open",
                        Topic.posted_at >= last_week)))
def index(self, page=1, **kwargs): """List storage engines with pagination. :rtype: Dict :returns: engines The list of :class:`~mediacore.lib.storage.StorageEngine` instances for this page. """ engines = DBSession.query(StorageEngine)\ .options(orm.undefer('file_count'), orm.undefer('file_size_sum'))\ .all() engines = list(sort_engines(engines)) existing_types = set(ecls.engine_type for ecls in engines) addable_engines = [ ecls for ecls in StorageEngine if not ecls.is_singleton or ecls.engine_type not in existing_types ] return { 'engines': engines, 'addable_engines': addable_engines, }
def _process(self):
    q = request.args["q"].lower()
    query = Category.query.filter(Category.title_matches(q)).options(
        undefer("deep_children_count"),
        undefer("deep_events_count"),
        undefer("has_events"),
        joinedload("acl_entries"),
    )
    if session.user:
        # Prefer favorite categories
        query = query.order_by(
            Category.favorite_of.any(
                favorite_category_table.c.user_id == session.user.id
            ).desc()
        )
    # Prefer exact matches and matches at the beginning, then order by
    # category title and if those are identical by the chain titles
    query = query.order_by(
        (db.func.lower(Category.title) == q).desc(),
        db.func.lower(Category.title).startswith(q).desc(),
        db.func.lower(Category.title),
        Category.chain_titles,
    )
    total_count = query.count()
    query = query.limit(10)
    return jsonify_data(
        categories=[
            serialize_category(c, with_favorite=True, with_path=True)
            for c in query
        ],
        total_count=total_count,
        flash=False,
    )
def get_related_categories(user, detailed=True):
    """Gets the related categories of a user for the dashboard"""
    favorites = set()
    if user.favorite_categories:
        favorites = set(Category.query
                        .filter(Category.id.in_(c.id for c in user.favorite_categories))
                        .options(undefer('chain_titles'))
                        .all())
    managed = set(Category.query
                  .filter(Category.acl_entries.any(db.and_(CategoryPrincipal.type == PrincipalType.user,
                                                           CategoryPrincipal.user == user,
                                                           CategoryPrincipal.has_management_role())),
                          ~Category.is_deleted)
                  .options(undefer('chain_titles')))
    if not detailed:
        return favorites | managed
    res = {}
    for categ in favorites | managed:
        res[(categ.title, categ.id)] = {
            'categ': categ,
            'favorite': categ in favorites,
            'managed': categ in managed,
            'path': truncate_path(categ.chain_titles[:-1], chars=50)
        }
    return OrderedDict(sorted(res.items(), key=itemgetter(0)))
def test_state_deferred_to_col(self):
    """Behavioral test to verify the current activity of
    loader callables."""

    users, User = self.tables.users, self.classes.User

    mapper(User, users, properties={"name": deferred(users.c.name)})

    sess = create_session()
    u1 = sess.query(User).options(undefer(User.name)).first()
    assert "name" not in attributes.instance_state(u1).callables

    # mass expire, the attribute was loaded,
    # the attribute gets the callable
    sess.expire(u1)
    assert isinstance(
        attributes.instance_state(u1).callables["name"],
        state.InstanceState)

    # load it, callable is gone
    u1.name
    assert "name" not in attributes.instance_state(u1).callables

    # mass expire, attribute was loaded but then deleted,
    # the callable goes away - the state wants to flip
    # it back to its "deferred" loader.
    sess.expunge_all()
    u1 = sess.query(User).options(undefer(User.name)).first()
    del u1.name
    sess.expire(u1)
    assert "name" not in attributes.instance_state(u1).callables

    # single attribute expire, the attribute gets the callable
    sess.expunge_all()
    u1 = sess.query(User).options(undefer(User.name)).first()
    sess.expire(u1, ["name"])
    assert isinstance(
        attributes.instance_state(u1).callables["name"],
        state.InstanceState)
def render(self, session, **arguments):
    q = session.query(Switch)

    q = q.options(subqueryload('location'),
                  subqueryload('interfaces'),
                  joinedload('interfaces.assignments'),
                  joinedload('interfaces.assignments.dns_records'),
                  joinedload('interfaces.assignments.network'),
                  subqueryload('observed_macs'),
                  undefer('observed_macs.creation_date'),
                  subqueryload('observed_vlans'),
                  undefer('observed_vlans.creation_date'),
                  joinedload('observed_vlans.network'),
                  subqueryload('model'),
                  # Switches don't have machine specs, but the formatter
                  # checks for their existence anyway
                  joinedload('model.machine_specs'))

    # Prefer the primary name for ordering
    q = q.outerjoin(DnsRecord, (Fqdn, DnsRecord.fqdn_id == Fqdn.id),
                    DnsDomain)
    q = q.options(contains_eager('primary_name'),
                  contains_eager('primary_name.fqdn'),
                  contains_eager('primary_name.fqdn.dns_domain'))
    q = q.reset_joinpoint()

    q = q.order_by(Fqdn.name, DnsDomain.name, Switch.label)

    return q.all()
def render(self, session, **arguments):
    q = session.query(Domain)
    q = q.options(undefer('comments'),
                  joinedload('owner'),
                  undefer('tracked_branch.comments'))
    q = q.order_by(Domain.name)
    return q.all()
def render(self, session, **arguments):
    q = session.query(NetworkEnvironment)
    q = q.options(undefer('comments'),
                  joinedload('dns_environment'),
                  undefer('dns_environment.comments'),
                  joinedload('location'))
    q = q.order_by(NetworkEnvironment.name)
    return q.all()
def news_query(self):
    query = object_session(self).query(News)
    query = query.filter(Page.parent == self)
    query = query.order_by(desc(Page.created))
    query = query.options(undefer('created'))
    query = query.options(undefer('content'))
    return query
def render(self, session, network_environment, **arguments):
    options = [undefer("comments"),
               joinedload("dns_environment"),
               undefer("dns_environment.comments")]
    dbnet_env = NetworkEnvironment.get_unique(session,
                                              network_environment,
                                              compel=True,
                                              query_options=options)
    return dbnet_env
def _get_current_posts(self, load_json=False):
    if load_json:
        results = self.db.query(FacebookPost).\
            options(undefer('imported_blob'),
                    undefer('attachment_blob')).\
            filter_by(source=self).all()
        return {x.source_post_id: x for x in results}

    results = self.db.query(FacebookPost).filter_by(
        source=self).all()
    return {x.source_post_id: x for x in results}
def serialize_category_chain(category, include_children=False, include_parents=False):
    data = {'category': serialize_category(category, with_path=True)}
    if include_children:
        data['subcategories'] = [
            serialize_category(c, with_path=True, parent_path=data['category']['path'])
            for c in category.children
        ]
    if include_parents:
        query = (category.parent_chain_query
                 .options(undefer('deep_events_count'),
                          undefer('deep_children_count')))
        data['supercategories'] = [
            serialize_category(c, with_path=True, child_path=data['category']['path'])
            for c in query
        ]
    return data
def session_transactions(request, info, session, sessionid):
    s = session\
        .query(Session)\
        .options(undefer('transactions.total'),
                 undefer('transactions.discount_total'),
                 joinedload('transactions.payments'))\
        .get(int(sessionid))
    if not s:
        raise Http404
    return ('session-transactions.ajax', {'session': s})
def transaction(request, info, session, transid):
    t = session\
        .query(Transaction)\
        .options(subqueryload_all('payments'),
                 joinedload('lines.department'),
                 joinedload('lines.user'),
                 undefer('total'),
                 undefer('discount_total'))\
        .get(int(transid))
    if not t:
        raise Http404
    return ('transaction.html', {'transaction': t, 'tillobject': t})
def render(self, session, network_environment, **arguments):
    q = session.query(NetworkEnvironment)
    q = q.options(undefer('comments'),
                  joinedload('dns_environment'),
                  undefer('dns_environment.comments'),
                  joinedload('location'))
    if network_environment:
        q = q.filter_by(name=network_environment)
    location = get_location(session, **arguments)
    if location:
        q = q.filter_by(location=location)
    q = q.order_by(NetworkEnvironment.name)
    return q.all()
def _category_query_options(cls):
    children_strategy = subqueryload('children')
    children_strategy.load_only('id', 'parent_id', 'title', 'protection_mode')
    children_strategy.subqueryload('acl_entries')
    children_strategy.undefer('deep_children_count')
    children_strategy.undefer('deep_events_count')
    children_strategy.undefer('has_events')
    return (children_strategy,
            load_only('id', 'parent_id', 'title', 'protection_mode'),
            subqueryload('acl_entries'),
            undefer('deep_children_count'),
            undefer('deep_events_count'),
            undefer('has_events'),
            undefer('chain'))
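The method above attaches several sub-options to one loader strategy so that they all apply along the children relationship path. Here is a minimal runnable sketch of the same chaining idea in the attribute-bound style; the Parent/Child models are hypothetical assumptions for illustration.

# Hypothetical models demonstrating sub-options chained onto a
# relationship loader option.
from sqlalchemy import Column, ForeignKey, Integer, Text, create_engine
from sqlalchemy.orm import (Session, declarative_base, deferred,
                            relationship, subqueryload)

Base = declarative_base()

class Parent(Base):
    __tablename__ = "parent"
    id = Column(Integer, primary_key=True)
    children = relationship("Child")

class Child(Base):
    __tablename__ = "child"
    id = Column(Integer, primary_key=True)
    parent_id = Column(ForeignKey("parent.id"))
    notes = deferred(Column(Text))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Each chained call narrows how the related Child rows are loaded:
    # fetch them via a subquery and undefer their deferred column.
    strategy = subqueryload(Parent.children).undefer(Child.notes)
    parents = session.query(Parent).options(strategy).all()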
def render(self, session, logger, share, **arguments):
    self.deprecated_command("show_nas_disk_share is deprecated, please use "
                            "show_share instead.", logger=logger, **arguments)
    q = session.query(Share)
    if share:
        q = q.filter_by(name=share)
    q = q.join(ClusterResource, EsxCluster)
    q = q.options(undefer(Share.disk_count))
    q = q.options(undefer(Share.machine_count))

    result = q.all()
    if share and not result:
        raise NotFoundException("Share %s does not exist." % share)
    return ServiceShareList(result)
def objs(self):
    """Return a :type:`list` of recently updated :class:`TopicContainer`
    associated with this :class:`Board`. Only the top 10 recent topics
    are returned by this method. Deferred columns :attr:`post_count`
    and :attr:`posted_at` are undeferred to prevent N+1 queries when
    used for listing topics. Each object will have this board as its
    parent and will resolve to a `/slug/topic_id` URL.
    """
    # TODO: Optimize me, accessing posts here requires 1+N queries.
    # However we can't eager-load posts, since they would have to be
    # stored in memory, which could be very expensive for a large
    # board (maximum of 1000 posts x 10 topics, or 10k rows.)
    return self._query_posts(lambda q: q.limit(10).\
        options(undefer('post_count'), undefer('posted_at')))
def test_deep_options(self):
    users, items, order_items, Order, Item, User, orders = (
        self.tables.users,
        self.tables.items,
        self.tables.order_items,
        self.classes.Order,
        self.classes.Item,
        self.classes.User,
        self.tables.orders)

    mapper(Item, items, properties=dict(
        description=deferred(items.c.description)))
    mapper(Order, orders, properties=dict(
        items=relationship(Item, secondary=order_items)))
    mapper(User, users, properties=dict(
        orders=relationship(Order, order_by=orders.c.id)))

    sess = create_session()
    q = sess.query(User).order_by(User.id)
    l = q.all()
    item = l[0].orders[1].items[1]

    def go():
        eq_(item.description, 'item 4')
    self.sql_count_(1, go)
    eq_(item.description, 'item 4')

    sess.expunge_all()
    l = q.options(undefer('orders.items.description')).all()
    item = l[0].orders[1].items[1]

    def go():
        eq_(item.description, 'item 4')
    self.sql_count_(0, go)
    eq_(item.description, 'item 4')
def test_options(self):
    """Options on a mapper to create deferred and undeferred columns"""

    orders, Order = self.tables.orders, self.classes.Order

    mapper(Order, orders)
    sess = create_session()
    q = sess.query(Order).order_by(Order.id).options(defer('user_id'))

    def go():
        q.all()[0].user_id

    self.sql_eq_(go, [
        ("SELECT orders.id AS orders_id, "
         "orders.address_id AS orders_address_id, "
         "orders.description AS orders_description, "
         "orders.isopen AS orders_isopen "
         "FROM orders ORDER BY orders.id", {}),
        ("SELECT orders.user_id AS orders_user_id "
         "FROM orders WHERE orders.id = :param_1", {'param_1': 1})])

    sess.expunge_all()
    q2 = q.options(undefer('user_id'))
    self.sql_eq_(q2.all, [
        ("SELECT orders.id AS orders_id, "
         "orders.user_id AS orders_user_id, "
         "orders.address_id AS orders_address_id, "
         "orders.description AS orders_description, "
         "orders.isopen AS orders_isopen "
         "FROM orders ORDER BY orders.id", {})])
def _process(self):
    query = (Category.query
             .filter(Category.id.in_(c.id for c in self.user.favorite_categories))
             .options(undefer('chain_titles')))
    categories = sorted([(cat, truncate_path(cat.chain_titles[:-1], chars=50))
                         for cat in query],
                        key=lambda c: (c[0].title, c[1]))
    return WPUser.render_template('favorites.html', 'favorites',
                                  user=self.user,
                                  favorite_categories=categories)
def test_crud(self):
    from clld.db.migration import Connection

    migration = Connection(DBSession)

    assert len(list(migration.select(common.Identifier))) == 0
    pk = migration.insert(
        common.Identifier,
        id='iso-csw',
        name='csw',
        type=common.IdentifierType.iso.value)
    assert migration.pk(common.Identifier, 'iso-csw') == pk
    assert len(list(migration.select(common.Identifier))) == 1

    identifier = DBSession.query(common.Identifier)\
        .options(undefer('*')).get(pk)
    assert identifier.active
    assert identifier.version == 1
    assert identifier.created
    assert identifier.updated

    migration.update(common.Identifier, [('name', 'cea')], pk=pk)
    DBSession.refresh(identifier)
    assert identifier.name == 'cea'

    migration.delete(common.Identifier, pk=pk)
    self.assertRaises(InvalidRequestError, DBSession.refresh, identifier)
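This test uses the wildcard form undefer('*'), which lifts every deferred column on the queried entity in one SELECT. A minimal sketch of that form follows; the Document model is an assumption for illustration.

# Hypothetical model showing the undefer('*') wildcard.
from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.orm import Session, declarative_base, deferred, undefer

Base = declarative_base()

class Document(Base):
    __tablename__ = "documents"
    id = Column(Integer, primary_key=True)
    body = deferred(Column(Text))
    summary = deferred(Column(Text))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Document(body="b", summary="s"))
    session.commit()
    # Both deferred columns arrive in the single initial SELECT.
    doc = session.query(Document).options(undefer("*")).first()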
def _get_collection_range(self, offset, limit):
    """Get the objects in a certain range of the collection

    :return: a list of the objects in the collection, starting at
        offset, until limit
    """
    from sqlalchemy import orm
    from sqlalchemy.exc import InvalidRequestError

    query = self.get_query_getter()().offset(offset).limit(limit)
    #
    # undefer all columns displayed in the list, to reduce the number
    # of queries
    #
    columns_to_undefer = []
    for field_name, _field_attributes in self._columns:
        property = None
        try:
            property = self.admin.mapper.get_property(field_name)
        except InvalidRequestError:
            #
            # If the field name is not a property of the mapper
            #
            pass
        if property and isinstance(property, orm.properties.ColumnProperty):
            columns_to_undefer.append(field_name)
    if columns_to_undefer:
        options = [orm.undefer(field_name)
                   for field_name in columns_to_undefer]
        query = query.options(*options)
    return query.all()
def _subnet_find(context, limit, sorts, marker, page_reverse, fields,
                 defaults=None, provider_query=False, **filters):
    query = context.session.query(models.Subnet)
    model_filters = _model_query(context, models.Subnet, filters, query)

    if defaults:
        invert_defaults = False
        if INVERT_DEFAULTS in defaults:
            invert_defaults = True
            defaults.pop(0)
        if filters and invert_defaults:
            query = query.filter(and_(not_(models.Subnet.id.in_(defaults)),
                                      and_(*model_filters)))
        elif not provider_query and filters and not invert_defaults:
            query = query.filter(or_(models.Subnet.id.in_(defaults),
                                     and_(*model_filters)))
        elif not invert_defaults:
            query = query.filter(models.Subnet.id.in_(defaults))
    else:
        query = query.filter(*model_filters)

    if "join_dns" in filters:
        query = query.options(orm.joinedload(models.Subnet.dns_nameservers))

    if "join_routes" in filters:
        query = query.options(orm.joinedload(models.Subnet.routes))

    if "join_pool" in filters:
        query = query.options(orm.undefer('_allocation_pool_cache'))

    return paginate_query(query, models.Subnet, limit, sorts, marker)
def sequence_modal_protein(sequence_id):
    """
    Returns the protein sequence in a modal

    :param sequence_id: ID of the sequence
    :return: Response with the rendered modal
    """
    current_sequence = Sequence.query\
        .options(undefer('coding_sequence'))\
        .options(noload('xrefs'))\
        .get_or_404(sequence_id)

    return render_template('modals/sequence.html',
                           sequence=current_sequence,
                           coding=False)
def expression_profile_plot_json(profile_id):
    """
    Generates a JSON object that can be rendered using Chart.js line plots

    :param profile_id: ID of the profile to render
    """
    current_profile = ExpressionProfile.query.options(
        undefer('profile')).get_or_404(profile_id)
    data = json.loads(current_profile.profile)

    plot = prepare_expression_profile(data,
                                      show_sample_count=True,
                                      ylabel='TPM')

    return Response(json.dumps(plot), mimetype='application/json')
def __call__(self):
    lecture = self.db.query(models.Lecture).options(
        undefer('tutorials.student_count')).get(self.lecture_id)
    times = lecture.prepareTimePreferences(user=self.request.user)
    subscribed_tutorial = self.request.user.tutorials.filter(
        Tutorial.lecture_id == self.lecture_id).first()
    form = TutorLectureAuthSignIn(self.request)
    self.request.javascript.append('unsubscribe_modal_helpers.js')
    return {
        'lecture': lecture,
        'subscribed_tutorial': subscribed_tutorial,
        'times': times,
        'prefs': utils.preferences,
        'form': form
    }
def test_state_deferred_to_col(self):
    """Behavioral test to verify the current activity of
    loader callables."""

    mapper(User, users, properties={'name': deferred(users.c.name)})

    sess = create_session()
    u1 = sess.query(User).options(undefer(User.name)).first()
    assert 'name' not in attributes.instance_state(u1).callables

    # mass expire, the attribute was loaded,
    # the attribute gets the callable
    sess.expire(u1)
    assert isinstance(
        attributes.instance_state(u1).callables['name'],
        state.InstanceState)

    # load it, callable is gone
    u1.name
    assert 'name' not in attributes.instance_state(u1).callables

    # mass expire, attribute was loaded but then deleted,
    # the callable goes away - the state wants to flip
    # it back to its "deferred" loader.
    sess.expunge_all()
    u1 = sess.query(User).options(undefer(User.name)).first()
    del u1.name
    sess.expire(u1)
    assert 'name' not in attributes.instance_state(u1).callables

    # single attribute expire, the attribute gets the callable
    sess.expunge_all()
    u1 = sess.query(User).options(undefer(User.name)).first()
    sess.expire(u1, ['name'])
    assert isinstance(
        attributes.instance_state(u1).callables['name'],
        state.InstanceState)
def get_newest_sources_index(session, repo, suite):
    '''
    Create an index of the most recent source packages, using
    the source-UUID of source packages.
    '''
    from laniakea.utils import compare_versions

    res_spkgs = {}
    spkgs = session.query(SourcePackage) \
        .options(undefer(SourcePackage.version)) \
        .options(undefer(SourcePackage.architectures)) \
        .filter(SourcePackage.suites.any(ArchiveSuite.id == suite.id)) \
        .filter(SourcePackage.repo_id == repo.id) \
        .order_by(SourcePackage.version.desc()) \
        .all()

    for pkg in spkgs:
        epkg = res_spkgs.get(pkg.uuid)
        if epkg and compare_versions(pkg.version, epkg.version) <= 0:
            # don't override if the existing version is newer
            continue
        res_spkgs[pkg.uuid] = pkg
    return res_spkgs
def _clone_contribs(self, new_event):
    query = (Contribution.query.with_parent(self.old_event)
             .options(undefer('_last_friendly_subcontribution_id'),
                      joinedload('own_venue'),
                      joinedload('own_room').lazyload('*'),
                      joinedload('session'),
                      joinedload('session_block').lazyload('session'),
                      joinedload('type'),
                      subqueryload('acl_entries'),
                      subqueryload('subcontributions').joinedload('references'),
                      subqueryload('references'),
                      subqueryload('person_links'),
                      subqueryload('field_values')))
    for old_contrib in query:
        self._contrib_map[old_contrib] = self._create_new_contribution(new_event, old_contrib)
def query(self, session, period):
    q = session.query(OccasionNeed)
    q = q.filter(
        OccasionNeed.occasion_id.in_(
            session.query(Occasion.id).filter(
                Occasion.period_id == period.id).subquery()))
    q = q.join(Occasion)
    q = q.options(
        joinedload(OccasionNeed.occasion).joinedload(Occasion.activity))
    q = q.options(
        joinedload(OccasionNeed.occasion).joinedload(Occasion.period))
    q = q.options(undefer('*'))
    q = q.order_by(Occasion.order, OccasionNeed.name)
    return q
def _demo_tool(self, game_id):
    game = self.db.query(Game).options(undefer('board')).get(game_id)
    if not game.is_demo or not game.demo_control == self.user:
        raise InvalidPlayerError()
    if not game.board.current_node:
        game.board.add_edits([], [], [])
    node = game.board.current_node
    yield game, node
    game.apply_board_change()
    self._publish_game_update(game)
def index(self, page=1, **kw):
    """List podcasts with pagination.

    :param page: Page number, defaults to 1.
    :type page: int
    :rtype: Dict
    :returns:
        podcasts
            The list of :class:`~mediacore.model.podcasts.Podcast`
            instances for this page.

    """
    podcasts = DBSession.query(Podcast)\
        .options(orm.undefer('media_count'))\
        .order_by(Podcast.title)
    return dict(podcasts=podcasts)
class RHCategoryIcon(RHDisplayCategoryBase):
    _category_query_options = undefer('icon'),

    def _check_access(self):
        # Category icons are always public
        pass

    def _process(self):
        if not self.category.has_icon:
            raise NotFound
        metadata = self.category.icon_metadata
        return send_file(metadata['filename'],
                         BytesIO(self.category.icon),
                         mimetype=metadata['content_type'],
                         conditional=True)
def _process(self):
    q = request.args['q'].lower()
    query = (Category.query
             .filter(Category.title_matches(q))
             .options(undefer('deep_children_count'),
                      undefer('deep_events_count'),
                      undefer('has_events'),
                      joinedload('acl_entries')))
    if session.user:
        # Prefer favorite categories
        query = query.order_by(
            Category.favorite_of.any(favorite_category_table.c.user_id == session.user.id).desc())
    # Prefer exact matches and matches at the beginning, then order by category title and if
    # those are identical by the chain titles
    query = (query.order_by(
        (db.func.lower(Category.title) == q).desc(),
        db.func.lower(Category.title).startswith(q).desc(),
        db.func.lower(Category.title),
        Category.chain_titles))
    total_count = query.count()
    query = query.limit(10)
    return jsonify_data(categories=[serialize_category(c, with_favorite=True, with_path=True)
                                    for c in query],
                        total_count=total_count,
                        flash=False)
def test_load_only_w_deferred(self):
    orders, Order = self.tables.orders, self.classes.Order

    mapper(Order, orders, properties={
        "description": deferred(orders.c.description)})

    sess = create_session()
    q = sess.query(Order).options(
        load_only("isopen", "description"),
        undefer("user_id"))
    self.assert_compile(
        q,
        "SELECT orders.description AS orders_description, "
        "orders.id AS orders_id, "
        "orders.user_id AS orders_user_id, "
        "orders.isopen AS orders_isopen FROM orders")
def dispatch_request(self):
    form = UploadImport()

    if form.validate_on_submit():
        f = form.file.data
        file = ImportFile()
        last_import = (self.session.query(ImportFile)
                       .order_by(ImportFile.last_activity.desc())
                       .first())

        # Set those attributes from the latest imported file,
        # defaulting to the column default
        for attr in ["imported_external_object_type", "platform_id",
                     "fields"]:
            setattr(file, attr,
                    getattr(last_import, attr, getattr(file, attr, None)))

        file.upload(file=f)
        self.session.add(file)
        self.session.commit()
        f.save(str(file.path))
        return redirect(url_for(".show_import_file", id=file.id))

    query = self.query(ImportFile).options(
        undefer(ImportFile.last_activity))

    ordering = parse_ordering(request.args.get("ordering", None, str))
    ordering_key, ordering_direction = (ordering if ordering != (None, None)
                                        else ("date", "desc"))
    query = apply_ordering(
        {
            "date": ImportFile.last_activity,
            "filename": ImportFile.filename,
            None: ImportFile.id,
        },
        query,
        key=ordering_key,
        direction=ordering_direction,
    )

    ctx = {}
    ctx["ordering"] = request.args.get("ordering", None, str)
    ctx["page"] = query.paginate()
    ctx["upload_form"] = form

    return render_template("imports/list.html", **ctx)
def _copy_dataset_merge(self, ds, source_session, dest_session, table_class,
                        incver, cb=None):
    from sqlalchemy.orm import noload, undefer

    i = [0]

    # ? Why are we doing this?
    options = [noload('*')]
    if table_class == File:
        options.append(undefer('contents'))

    objects = []
    for o in source_session.query(table_class).filter(
            table_class.d_vid == ds.vid).options(*options).all():
        if incver:
            o = o.incver()
        objects.append(o.__dict__)

        i[0] += 1
        if i[0] % 20000 == 0:
            dest_session.bulk_insert_mappings(table_class, objects)
            dest_session.commit()
            if cb:
                cb('Copy dataset', i[0])
            else:
                self.logger.info(
                    "Copied {} records of table {} for {}".format(
                        i[0], table_class, ds.vid))
            objects = []

    if objects:
        dest_session.bulk_insert_mappings(table_class, objects)
        dest_session.commit()
        self.logger.info("Copied {} records of table {} for {}".format(
            i[0], table_class, ds.vid))
def bin_package_details(suite_name, name):
    with session_scope() as session:
        suite = session.query(ArchiveSuite) \
            .filter(ArchiveSuite.name == suite_name) \
            .one_or_none()
        if not suite:
            abort(404)

        bpkgs = session.query(BinaryPackage) \
            .options(joinedload(BinaryPackage.architecture)) \
            .options(joinedload(BinaryPackage.bin_file)) \
            .options(undefer(BinaryPackage.version)) \
            .filter(BinaryPackage.name == name) \
            .filter(BinaryPackage.suites.any(ArchiveSuite.id == suite.id)) \
            .order_by(BinaryPackage.version.desc()).all()
        if not bpkgs:
            abort(404)

        suites = [
            s[0]
            for s in session.query(ArchiveSuite.name.distinct())
            .filter(ArchiveSuite.bin_packages.any(BinaryPackage.name == name))
            .all()
        ]

        architectures = set()
        bpkg_rep = bpkgs[0]  # the first package is always the most recent one
        for bpkg in bpkgs:
            architectures.add(bpkg.architecture)
        if not bpkg_rep:
            abort(404)

        dep_issues = session.query(DebcheckIssue) \
            .filter(DebcheckIssue.package_type == PackageType.BINARY) \
            .filter(DebcheckIssue.suite_id == suite.id) \
            .filter(DebcheckIssue.package_name == bpkg_rep.name) \
            .filter(DebcheckIssue.package_version == bpkg_rep.version) \
            .all()

        return render_template('packages/bin_details.html',
                               pkg=bpkg_rep,
                               pkgs_all=bpkgs,
                               pkg_suite_name=suite_name,
                               suites=suites,
                               architectures=architectures,
                               dep_issues=dep_issues,
                               naturalsize=humanize.naturalsize,
                               make_linked_dependency=make_linked_dependency,
                               link_for_bin_package_id=link_for_bin_package_id)
def get(self, game_id):
    game = self.db.query(Game).options(undefer('board')).get(game_id)
    if not game:
        raise HTTPError(404)
    filename = '%s-%s-%s.sgf' % (game.created_at.date().isoformat(),
                                 game.white_display,
                                 game.black_display)
    self.set_header('Content-Type', 'application/x-go-sgf; charset=utf-8')
    self.set_header('Content-Disposition',
                    'attachment; filename="%s"' % filename)
    self.enable_cors()
    self.write(game_to_sgf(game))
def reprocess_content(self):
    """Allows re-parsing all content as if it were imported for the first
    time, but without re-hitting the source or changing the object ids.
    Call when a code change would change the representation in the
    database."""
    session = self.db
    emails = session.query(Email.id).filter(Email.source_id == self.id)
    for (email_id,) in emails:
        with transaction.manager:
            # loader options must be applied to the query before the
            # row is fetched, not to the already-loaded instance
            email_ = session.query(Email).options(
                joinedload_all(Email.parent),
                undefer(Email.imported_blob)).get(email_id)
            (email_object, dummy, error) = self.parse_email(
                email_.imported_blob, email_)
    with transaction.manager:
        self.thread_mails(emails)
def single_team(team_id):
    team_query = Team.query.options(undefer('espys_total')).get(team_id)
    if team_query is None:
        return {}
    games = (DB.session.query(Game)
             .filter(or_(Game.away_team_id == team_id,
                         Game.home_team_id == team_id))
             .all())
    espy_total = (team_query.espys_total
                  if team_query.espys_total is not None else 0)
    team = {
        team_id: {
            'wins': 0,
            'losses': 0,
            'games': 0,
            'ties': 0,
            'runs_for': 0,
            'runs_against': 0,
            'hits_for': 0,
            'hits_allowed': 0,
            'name': str(team_query),
            'espys': espy_total
        }
    }
    for game in games:
        # loop through each game
        scores = game.summary()
        if game.away_team_id == team_id:
            score = scores['away_score']
            hits = scores['away_bats']
            opp = scores['home_score']
            opp_hits = scores['home_bats']
        else:
            score = scores['home_score']
            hits = scores['home_bats']
            opp = scores['away_score']
            opp_hits = scores['away_bats']
        if score > opp:
            team[team_id]['wins'] += 1
        elif score < opp:
            team[team_id]['losses'] += 1
        elif scores['home_bats'] + scores['away_bats'] > 0:
            team[team_id]['ties'] += 1
        team[team_id]['runs_for'] += score
        team[team_id]['runs_against'] += opp
        team[team_id]['hits_for'] += hits
        team[team_id]['hits_allowed'] += opp_hits
        team[team_id]['games'] += 1
    return team
def testEspsysTotal(self):
    """Test that the espys total works"""
    league = self.add_league(str(uuid.uuid1()))
    sponsor = self.add_sponsor(str(uuid.uuid1()))
    team = self.add_team(color="Black", sponsor=sponsor, league=league)

    # award the team 3 espys points in two different transactions
    espy_one = self.add_espys(team, sponsor, points=1)
    espy_two = self.add_espys(team, sponsor, points=2)

    # assert that their total is 3 points
    self.assertEqual(
        Team.query.options(undefer('espys_total')).get(
            team['team_id']).espys_total,
        espy_one['points'] + espy_two['points'],
        "Expecting 3 espys points to be awarded")
def process_email_ids(self, email_ids):
    self.set_status(ReaderStatus.READING)
    self.refresh_source()
    log.info("Processing messages from IMAP: %d" % len(email_ids))
    for email_id in email_ids:
        self.import_email(email_id)
        if self.status != ReaderStatus.READING:
            break
    # We imported mails, we need to re-thread
    self.source.db.flush()
    # Rethread emails globally (sigh)
    emails = self.source.db.query(Post).filter_by(
        discussion_id=self.source.discussion_id
    ).options(undefer(ImportedPost.imported_blob)).all()
    AbstractMailbox.thread_mails(emails)
    self.source.db.commit()
def get_packages(self, collection, only_new=False):
    """Get packages eligible for resolution in the new repo for the
    given collection.

    :param collection: collection for which packages are requested
    :param only_new: whether to consider only packages that weren't
                     resolved yet
    """
    query = (self.db.query(Package)
             .filter(~Package.blocked)
             .filter(Package.tracked)
             .filter(~Package.skip_resolution)
             .filter(Package.collection_id == collection.id)
             .filter(Package.last_complete_build_id != None)
             .options(joinedload(Package.last_build))
             .options(undefer('last_build.dependency_keys')))
    if only_new:
        query = query.filter(Package.resolved == None)
    return query.all()
def transaction(request, info, session, transid):
    # XXX now that we store transaction descriptions explicitly, we
    # may not need to joinedload lines.stockref.stockitem.stocktype
    # and this will end up as a much simpler query.  Wait until old
    # transaction data has been migrated, though, because the web
    # interface is still used to look at old data.
    t = session\
        .query(Transaction)\
        .options(subqueryload_all('payments'),
                 joinedload('lines.department'),
                 joinedload_all('lines.stockref.stockitem.stocktype'),
                 joinedload('lines.user'),
                 undefer('total'))\
        .get(int(transid))
    if not t:
        raise Http404
    return ('transaction.html', {'transaction': t})
def fetch_queue_chunk(self, chunk_size, queue_no):
    logger.info("looking for new jobs")

    text_query_pattern = '''
        with refresh_queue as (
            select id
            from {queue_table}
            where queue_no = {queue_no}
              and started is null
            order by priority desc nulls last,
                     finished asc nulls first,
                     rand
            limit {chunk_size}
            for update skip locked
        )
        update {queue_table} queue_rows_to_update
        set started = now()
        from refresh_queue
        where refresh_queue.id = queue_rows_to_update.id
        returning refresh_queue.id;
    '''
    text_query = text_query_pattern.format(
        chunk_size=chunk_size,
        queue_table=self.table_name(None),
        queue_no=queue_no
    )

    logger.info("the queue query is:\n{}".format(text_query))

    job_time = time()
    row_list = db.engine.execute(
        text(text_query).execution_options(autocommit=True)).fetchall()
    object_ids = [row[0] for row in row_list]
    logger.info("got {} ids, took {} seconds".format(
        len(object_ids), elapsed(job_time)))

    job_time = time()
    q = db.session.query(Pub).options(
        orm.undefer('*')
    ).filter(Pub.id.in_(object_ids))
    objects = q.all()
    logger.info("got pub objects in {} seconds".format(elapsed(job_time)))

    return objects
def _get_eagerloaded_query(self, *args, **kwargs):
    options = []
    try:
        has_creator = 'owner' in self._get_schema_class().opts.fields
    except AttributeError:
        has_creator = False
    if has_creator:
        # APIs for objects with metadata always return the creator's
        # username. Do a joinedload to prevent doing one query per
        # object (n+1 problem)
        options.append(joinedload(
            getattr(self.model_class, 'creator')).load_only('username'))
    query = self._get_base_query(*args, **kwargs)
    options += [joinedload(relationship)
                for relationship in self.get_joinedloads]
    options += [undefer(column) for column in self.get_undefer]
    return query.options(*options)
def defer_fields(self, Model, cursor, fields):
    opts = []
    for field in Model.__table__.columns.keys():
        if field in fields or field in ('id',):
            opts.append(undefer(getattr(Model, field)))
        else:
            opts.append(defer(getattr(Model, field)))
    relation_properties = filter(
        lambda p: isinstance(p, properties.RelationshipProperty),
        Model.__mapper__.iterate_properties)
    for field in relation_properties:
        if field.key in fields:
            cursor = cursor.options(
                joinedload_all(getattr(Model, field.key)))
    if opts:
        cursor = cursor.options(*opts)
    return cursor
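defer_fields above effectively whitelists columns at query time: everything the caller asked for (plus the primary key) is undeferred, and the rest is deferred. A self-contained sketch of that pattern follows; the Article model and the helper name are assumptions for illustration.

# Sketch of column whitelisting with defer()/undefer(); note that
# defer() can be applied at query time even to columns not mapped
# with deferred().
from sqlalchemy import Column, Integer, String, Text, create_engine
from sqlalchemy.orm import Session, declarative_base, defer, undefer

Base = declarative_base()

class Article(Base):
    __tablename__ = "articles"
    id = Column(Integer, primary_key=True)
    title = Column(String(200))
    body = Column(Text)

def whitelist_columns(query, model, wanted):
    """Defer every mapped column except the primary key and `wanted`."""
    opts = []
    for name in model.__table__.columns.keys():
        if name in wanted or name == "id":
            opts.append(undefer(getattr(model, name)))
        else:
            opts.append(defer(getattr(model, name)))
    return query.options(*opts)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Article(title="t", body="b"))
    session.commit()
    # Only id and title are selected up front; body loads on access.
    q = whitelist_columns(session.query(Article), Article, {"title"})
    article = q.first()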
def _get_objects(self, object_query):
    """Get a set of objects described in the filters."""
    object_name = object_query["object_name"]
    expression = object_query.get("filters", {}).get("expression")
    if expression is None:
        return set()
    object_class = self.object_map[object_name]
    query = object_class.query
    query = query.options(undefer('updated_at'))

    requested_permissions = object_query.get("permissions", "read")
    with benchmark("Get permissions: _get_objects > _get_type_query"):
        type_query = self._get_type_query(object_class,
                                          requested_permissions)
        if type_query is not None:
            query = query.filter(type_query)
    with benchmark("Parse filter query: _get_objects > _build_expression"):
        filter_expression = self._build_expression(
            expression,
            object_class,
        )
        if filter_expression is not None:
            query = query.filter(filter_expression)

    if object_query.get("order_by"):
        with benchmark("Sorting: _get_objects > order_by"):
            query = self._apply_order_by(
                object_class,
                query,
                object_query["order_by"],
            )
    with benchmark("Apply limit"):
        limit = object_query.get("limit")
        if limit:
            matches, total = self._apply_limit(query, limit)
        else:
            matches = query.all()
            total = len(matches)
        object_query["total"] = total

    if hasattr(flask.g, "similar_objects_query"):
        # delete similar_objects_query for the case when several queries
        # are POSTed in one request, the first one filters by similarity
        # and the second one doesn't but tries to sort by __similarity__
        delattr(flask.g, "similar_objects_query")
    return matches
def __generate(species_id, method_id, condition):
    """
    :param species_id: internal ID of species
    :param method_id: internal ID of the method
    :param condition: Condition to be exported
    :return: output
    """
    yield "Sequence\tAliases\tDescription\tAvg.Expression\tMin.Expression\tMax.Expression\n"
    # note: isnot(None) generates SQL "IS NOT NULL"; a plain
    # "is not None" comparison would be evaluated in Python and
    # always be true
    profiles = ExpressionProfile.query.filter(ExpressionProfile.species_id == species_id). \
        filter(ExpressionProfile.sequence_id.isnot(None)). \
        options(undefer('profile')).order_by(ExpressionProfile.probe.asc()).all()

    condition_tissue = ConditionTissue.query. \
        filter(ConditionTissue.expression_specificity_method_id == method_id).first()
    condition_tissue_data = json.loads(
        condition_tissue.data) if condition_tissue is not None else None

    for p in profiles:
        try:
            data = json.loads(p.profile)
            if condition_tissue is None:
                # main profile is used, directly export values
                values = data["data"][condition]
            else:
                # summarized profile is selected, convert and export
                converted_profile = ExpressionProfile.convert_profile(
                    condition_tissue_data, data, use_means=True)
                values = converted_profile["data"][condition]

            aliases = p.sequence.aliases if p.sequence.aliases is not None else ""
            description = p.sequence.description if p.sequence.description is not None else ""

            yield "%s\t%s\t%s\t%f\t%f\t%f\n" % (p.sequence.name,
                                                aliases,
                                                description,
                                                mean(values),
                                                min(values),
                                                max(values))
        except Exception as e:
            print("An error occurred exporting a profile with condition %s "
                  "for species %d." % (condition, species_id),
                  file=sys.stderr)
            print(e, file=sys.stderr)
def dispatch_request(self):
    logging.warn("Dispatching request")

    if request.method == "POST":
        if request.form.get("action") == "refresh":
            self.celery.send_task(
                "matcher.tasks.object.refresh_attributes", [])
            flash("Attributes are being refreshed")

    ctx = {}
    ctx["external_object_stats"] = defaultdict(
        int,
        {
            key.name: value
            for (key, value) in self.query(
                ExternalObject.type,
                func.count(ExternalObject.id)).group_by(ExternalObject.type)
        },
    )
    ctx["platforms_stats"] = defaultdict(
        int,
        {
            key.name: value
            for (key, value) in self.query(
                Platform.type,
                func.count(Platform.id)).group_by(Platform.type)
        },
    )
    ctx["object_link_count"] = self.query(ObjectLink).count()

    now = datetime.datetime.utcnow()

    def successful_scrap(timedelta):
        return (self.query(Scrap)
                .filter(Scrap.date >= (now - timedelta))
                .filter(Scrap.status == ScrapStatus.SUCCESS)
                .count())

    ctx["recent_scraps_count"] = {
        "day": successful_scrap(datetime.timedelta(days=1)),
        "week": successful_scrap(datetime.timedelta(weeks=1)),
        "month": successful_scrap(datetime.timedelta(days=30)),
        "year": successful_scrap(datetime.timedelta(days=365)),
    }

    ctx["last_scraps"] = self.query(Scrap).options(
        joinedload(Scrap.platform),
        undefer(Scrap.links_count))[-9:]

    return render_template("home.html", **ctx)
def findActivitySessions(self, patient, activityAlias):
    """
    Find all sessions for ``patient`` of activity type ``activityAlias``

    Returns an array of sessions or an empty array if none were found.
    """
    activity = self.db.query(Activity).filter(
        Activity.alias == activityAlias).one()
    sessions = self.db.query(Session)\
        .options(orm.undefer('score'))\
        .options(orm.eagerload('zone'))\
        .options(orm.eagerload('activity'))\
        .options(orm.eagerload('patient'))\
        .join(Patient).filter(Patient.id == patient.id)\
        .join(Activity).filter(Session.activity == activity)\
        .order_by(Session.timestamp.desc())\
        .limit(500).all()
    return sessions
def update_fn(cls, method_name, obj_id_list, shortcut_data=None, index=1):
    # we are in a fork!  dispose of our engine.
    # will get a new one automatically
    db.engine.dispose()

    start = time()

    q = db.session.query(cls).options(orm.undefer('*')).filter(
        cls.id.in_(obj_id_list))
    obj_rows = q.all()
    num_obj_rows = len(obj_rows)

    print "{repr}.{method_name}() got {num_obj_rows} objects in {elapsed}sec".format(
        repr=cls.__name__,
        method_name=method_name,
        num_obj_rows=num_obj_rows,
        elapsed=elapsed(start))

    for count, obj in enumerate(obj_rows):
        start_time = time()

        if obj is None:
            return None

        method_to_run = getattr(obj, method_name)

        print u"\n***\n{count}: starting {repr}.{method_name}() method".format(
            count=count + (num_obj_rows * index),
            repr=obj,
            method_name=method_name)

        if shortcut_data:
            method_to_run(shortcut_data)
        else:
            method_to_run()

        print u"finished {repr}.{method_name}(). took {elapsed}sec".format(
            repr=obj,
            method_name=method_name,
            elapsed=elapsed(start_time, 4))

    commit_success = safe_commit(db)
    if not commit_success:
        print u"COMMIT fail"
    db.session.remove()  # close connection nicely
    return None  # important for if we use this on RQ