async def read_wallet_groups():
    wallets = (WalletGroupAddress
               .select(WalletGroupAddress.wallet,
                       fn.COUNT(WalletGroupAddress.address).alias('count'))
               .group_by(WalletGroupAddress.wallet)
               .order_by(fn.COUNT(WalletGroupAddress.address).desc())
               .paginate(1, 10))
    return [{'wallet': wallet.wallet, 'address_count': wallet.count}
            for wallet in wallets]
def user_invitation_summary():  # noqa: D103
    summary = UserInvitation.select(
        fn.COUNT(UserInvitation.id).alias("total"),
        fn.COUNT(UserInvitation.confirmed_at).alias("confirmed")).first()
    unconfirmed_invitations = UserInvitation.select().where(
        UserInvitation.confirmed_at >> None).order_by(UserInvitation.created_at)
    return render_template(
        "invitation_summary.html",
        title="User Invitation Summary",
        summary=summary,
        unconfirmed_invitations=unconfirmed_invitations)
def execute(self):
    if self.type == FOLLOWING_COUNT:
        count = Relation.select(fn.COUNT(Relation.id)).where(
            Relation.from_user_id == self.user_id).scalar()
    elif self.type == FOLLOWER_COUNT:
        count = Relation.select(fn.COUNT(Relation.id)).where(
            Relation.to_user_id == self.user_id).scalar()
    else:
        count = 0
    return self.mk_response(count)
def get_stats():
    stats = {
        'ticket_type_distribution': {
            'adult': 0,
            'student': 0,
        },
        'location_distribution': [],
        'users_distribution': [],
        'movies_distribution': [],
    }

    # Ticket type distribution
    tickets = Tickets.select(
        Tickets.ticket_type,
        fn.COUNT(Tickets.ticket_type).alias('count')).group_by(
            Tickets.ticket_type)
    for ticket in tickets:
        stats['ticket_type_distribution'][ticket.ticket_type] = ticket.count

    # Location distribution
    tickets = Tickets.select(
        Tickets.row, Tickets.col,
        fn.COUNT(Tickets.id).alias('count')).group_by(Tickets.row, Tickets.col)
    stats['location_distribution'] = [{
        'row': ticket.row,
        'col': ticket.col,
        'count': ticket.count
    } for ticket in tickets]

    # Users distribution
    tickets = Tickets.select(
        Tickets.name,
        fn.COUNT(Tickets.id).alias('count')).group_by(Tickets.cnp)
    stats['users_distribution'] = [{
        'name': ticket.name,
        'count': ticket.count
    } for ticket in tickets]

    # Movies distribution
    tickets = Tickets.select(
        Tickets.movies_id,
        fn.COUNT(Tickets.id).alias('count')).group_by(Tickets.movies_id)
    stats['movies_distribution'] = [{
        'name': ticket.movies_id.title,
        'count': ticket.count
    } for ticket in tickets]

    return jsonify(stats)
def test_org_switch(client):
    """Test organisation switching."""
    user = User.get(
        orcid=User.select(fn.COUNT(User.orcid).alias("id_count"), User.orcid)
        .group_by(User.orcid).having(fn.COUNT(User.orcid) > 1)
        .objects().first().orcid)
    user_orgs = UserOrg.select().join(
        User, on=UserOrg.user).where(User.orcid == user.orcid)
    new_org = Organisation.select().where(
        Organisation.id.not_in([uo.org_id for uo in user_orgs])).first()
    UserOrg.create(user=user, org=new_org, affiliations=0)

    resp = client.login(user, follow_redirects=True)
    assert user.email.encode() in resp.data
    assert len(user.org_links) > 1
    assert current_user == user

    # Nothing changes if it is the same organisation
    uo = user.user_orgs.where(UserOrg.org_id == user.organisation_id).first()
    resp = client.get(f"/select/user_org/{uo.id}", follow_redirects=True)
    assert User.get(user.id).organisation_id == user.organisation_id
    assert user.email.encode() in resp.data

    # The current org changes if it's a different org on the list
    uo = user.user_orgs.where(UserOrg.org_id != user.organisation_id).first()
    resp = client.get(f"/select/user_org/{uo.id}", follow_redirects=True)
    assert User.get(user.id).organisation_id != user.organisation_id
    assert User.get(user.id).organisation_id == uo.org_id
    for ol in user.org_links:
        assert ol.org.name.encode() in resp.data
        if UserOrg.get(ol.id).user.id != user.id:
            next_ol = ol

    # Should be a totally different user account:
    resp = client.get(f"/select/user_org/{next_ol.id}", follow_redirects=True)
    next_user = UserOrg.get(next_ol.id).user
    assert next_user.id != user.id

    # Non-existing
    resp = client.get("/select/user_org/9999999", follow_redirects=True)
    assert b"Your are not related to this organisation" in resp.data

    # Wrong organisations
    uo = UserOrg.select().where(
        ~UserOrg.id.in_([uo.id for uo in user.org_links]),
        UserOrg.org != user.organisation).order_by(UserOrg.id.desc()).first()
    resp = client.get(f"/select/user_org/{uo.id}", follow_redirects=True)
    assert b"You cannot switch your user to this organisation" in resp.data
def on_commands_usage(self, event):
    q = Command.select(
        fn.COUNT('*'),
        Command.plugin,
        Command.command,
    ).group_by(Command.plugin, Command.command).order_by(
        fn.COUNT('*').desc()).limit(25)

    tbl = MessageTable()
    tbl.set_header('Plugin', 'Command', 'Usage')
    for count, plugin, command in q.tuples():
        tbl.add(plugin, command, count)

    event.msg.reply(tbl.compile())
def get_blogs_for_user(cls, user):
    readers = fn.COUNT(BlogParticipiation.id)
    return (cls.select(Blog, readers.alias("readers"))
            .join(BlogParticipiation, JOIN.LEFT_OUTER)
            .where((BlogParticipiation.user == user) & (Blog.blog_type != 3))
            .group_by(Blog.id)
            .order_by(readers.desc()))
def get_results(since=timedelta(days=1)):
    yesterday = datetime.datetime.now() - since
    d = time.mktime(yesterday.timetuple())

    result = LongPosition.select(
        fn.SUM(LongPosition.profit).alias('total_profit'),
        fn.SUM(LongPosition.buy_quantity * LongPosition.purchase_price).alias('total_spent'),
        fn.SUM(LongPosition.fees).alias('total_fees'),
        fn.COUNT(LongPosition.id).alias('num_positions')).where(
            # position is closed...
            (LongPosition.status << [
                LongPosition.STATUS__CLOSED_RIDE_PROFIT,
                LongPosition.STATUS__CLOSED_LOSS
            ]) &
            # ...within the last...
            (LongPosition.date_closed >= d))

    return {
        "profit": result[0].total_profit,
        "spent": result[0].total_spent,
        "profit_percentage": (result[0].total_profit / result[0].total_spent
                              if result[0].total_profit else Decimal('0.0')),
        "num_trades": result[0].num_positions,
        "fees": result[0].total_fees,
    }
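# Hedged usage sketch (not from the original source): the helper above takes a
# timedelta, so a caller could widen the window like this. The printed keys
# mirror the dict returned by get_results().
weekly = get_results(since=timedelta(days=7))
print("trades: {num_trades}, profit: {profit}, fees: {fees}".format(**weekly))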
def _find_next_steak(self):
    # Seems like queries cannot be reused...

    ## Prepare query to search for the next available steak
    # Create a subquery that counts the number of grills (at most 1) holding a spice
    subquery = SteakSpice.select(
        SteakSpice.spice.alias('spice'),
        fn.COUNT(GrillSpice.id.distinct()).alias('in_use'))
    subquery = subquery.join(GrillSpice, JOIN.LEFT_OUTER,
                             on=(SteakSpice.spice == GrillSpice.spice))
    subquery = subquery.group_by(SteakSpice.spice)

    # Select all raw steaks without a grill and attach a flag whether any spices are in use
    query = Steak.select(
        Steak.id.alias('steak_id'),
        fn.COALESCE(fn.SUM(subquery.c.in_use), 0).alias('spice_in_use'))
    query = query.join(SteakGrill, JOIN.LEFT_OUTER)
    query = query.switch(Steak).join(SteakSpice, JOIN.LEFT_OUTER)
    query = query.join(subquery, JOIN.LEFT_OUTER,
                       on=(subquery.c.spice == SteakSpice.spice)).group_by(Steak.id)
    query = query.where(Steak.status == STATUS_RAW).where(SteakGrill.grill == None)
    query = query.where(Steak.recipe.in_(self.recipe_names))

    # Select the steaks with no spices in use
    steak_query = Steak.select().join(
        query, JOIN.INNER,
        on=(Steak.id == query.c.steak_id)).where(query.c.spice_in_use == 0)

    # Return the oldest steak
    return steak_query.order_by(Steak.created).first()
def api_stat_site_cate_daily(date):
    try:
        date = datetime.strptime(date, '%Y-%m-%d').date()
    except (ValueError, KeyError):
        return jsonify({'code': 1, 'msg': 'invalid request'})

    query = RoomHourlyStat.select(
        RoomHourlyStat.cate,
        fn.SUM(RoomHourlyStat.dcount).alias('dsum'),
        fn.SUM(RoomHourlyStat.gcount).alias('gsum'),
        fn.SUM(RoomHourlyStat.ucount).alias('usum'),
        fn.SUM(RoomHourlyStat.income).alias('isum'),
        fn.COUNT(fn.DISTINCT(RoomHourlyStat.room)).alias('rsum')
    ).join(Date, on=(RoomHourlyStat.date == Date.date_key)) \
        .where(Date.date == date) \
        .group_by(RoomHourlyStat.cate)

    payload = []
    for row in query:
        payload.append({
            'cate': row.cate.name,
            'income': row.isum,
            'ucount': row.usum,
            'gcount': row.gsum,
            'dcount': row.dsum,
            'rcount': row.rsum
        })
    return jsonify({'code': 0, 'msg': 'success', 'data': payload})
def wait():
    uid = session['osm_uid'] if 'osm_uid' in session else 0
    isadmin = uid in config.ADMINS
    nominees = Nominee.select(
        Nominee, Vote.user.alias('voteuser')).where(
            Nominee.status == Nominee.Status.CHOSEN).join(
                Vote, JOIN.LEFT_OUTER,
                on=((Vote.nominee == Nominee.id) & (Vote.user == uid) &
                    (~Vote.preliminary))).naive()

    # For admin, populate the dict of votes
    winners = {x: [0, 0] for x in config.NOMINATIONS}
    if isadmin or config.STAGE == 'results':
        votesq = Nominee.select(
            Nominee.id, Nominee.category,
            fn.COUNT(Vote.id).alias('num_votes')).where(
                Nominee.status == Nominee.Status.CHOSEN).join(
                    Vote, JOIN.LEFT_OUTER,
                    on=((Vote.nominee == Nominee.id) &
                        (~Vote.preliminary))).group_by(Nominee.id)
        votes = {}
        for v in votesq:
            votes[v.id] = v.num_votes
            if v.num_votes > winners[v.category][1]:
                winners[v.category] = (v.id, v.num_votes)
    else:
        votes = None

    # Count total number of voters
    total = Vote.select(fn.Distinct(Vote.user)).where(
        ~Vote.preliminary).group_by(Vote.user).count()

    # Update a link in the description
    desc = g.lang['stages'][config.STAGE]['description']
    desc = desc.replace('{', '<a href="{}">'.format(
        url_for('static', filename='osmawards2020.txt'))).replace('}', '</a>')

    # Yay, done
    return render_template(
        'wait.html', nominees=nominees, description=desc, isadmin=isadmin,
        votes=votes, stage=config.STAGE, total=total, winners=winners,
        isresults=config.STAGE == 'results',
        nominations=config.NOMINATIONS, lang=g.lang)
def stores_list():
    # Add a 'count' column with the number of warehouses under each store
    stores = Store.select(
        Store.name, Store.store_id,
        fn.COUNT(Warehouse.store_id).alias('count')).join(
            Warehouse, JOIN.LEFT_OUTER).group_by(
                Store.store_id).order_by(Store.name)
    return render_template('stores.html', stores=stores)
def list(args: core.Namespace):
    """
    Lists some information about existing lands
    :param args:
    :return:
    """
    lands = model.Land.select()\
        .join(model.LandDictionary, JOIN.LEFT_OUTER)\
        .join(model.Word, JOIN.LEFT_OUTER)\
        .switch(model.Land)\
        .join(model.Expression, JOIN.LEFT_OUTER)\
        .group_by(model.Land.name)\
        .order_by(model.Land.name)

    if lands.count() > 0:
        for land in lands:
            exp_stats = model.Expression\
                .select(fn.COUNT(model.Expression.id).alias('num'))\
                .join(model.Land)\
                .where((model.Expression.land == land)
                       & (model.Expression.fetched_at.is_null()))
            to_crawl = [s.num for s in exp_stats]
            print("%s - (%s)\n\t%s" % (land.name,
                                       land.created_at.strftime("%B %d %Y %H:%M"),
                                       land.description))
            print("\t%s terms in land dictionary %s"
                  % (land.words.count(), [d.word.term for d in land.words]))
            print("\t%s expressions in land (%s remaining to crawl)"
                  % (land.expressions.count(), to_crawl[0]))
        return 1
    print("No land created")
    return 0
def articles_with_multiple_types(*types, **kwargs):
    """
    Given one or many lowercase types (spaces allowed, string),
    return articles containing those types.

    Specify op='and' for an AND query that returns articles with ALL types (default)
    Specify op='or' for an OR query that returns articles with AT LEAST one of types
    If an invalid op is passed, raises a ValueError.

    Specify limit=n kwarg to limit return values to n rows.
    """
    # In Python 2, we can't specify *args and named kwargs;
    # we must use *args and **kwargs
    op = kwargs.pop('op', 'and')
    limit = kwargs.pop('limit', sys.maxint)

    if op == 'and':
        result = (Article.select().join(ArticleType).join(Type).where(
            Type.type << types).group_by(Article).having(
                fn.COUNT(Article.id) == len(types)).limit(limit))
    elif op == 'or':
        result = (Article.select().join(ArticleType).join(Type).where(
            Type.type << types).group_by(Article).limit(limit))
    else:
        raise ValueError(
            "Illegal op: {}. Valid options are 'and', 'or'.".format(op))
    return [x.title for x in result]
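# Hedged usage sketch (hypothetical type names, not from the original source):
# op='and' returns articles tagged with every given type, op='or' with at
# least one of them, and limit= caps the number of rows returned.
tagged_with_both = articles_with_multiple_types('politics', 'economy', op='and')
tagged_with_either = articles_with_multiple_types('politics', 'economy', op='or', limit=10)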
def get_stats():
    res = Case.select(Conclusion.article, Conclusion.type,
                      fn.COUNT(Case.itemid))\
        .join(ConclusionCase)\
        .join(Conclusion)\
        .where(Conclusion.article != None)\
        .group_by(Conclusion.base_article, Conclusion.type)
    return res
def account(accountid=None):
    accounts = models.Account.select()
    title = 'Accounts'
    acct_w_opps = models.Account.select(
        models.Account.id, models.Account.name,
        fn.COUNT(models.Opportunity.id).alias('open_opps')).join(
            models.Opportunity, JOIN.LEFT_OUTER,
            on=((models.Account.id == models.Opportunity.account_id) &
                (models.Opportunity.stage != 'Closed Won') &
                (models.Opportunity.stage != 'Lost'))).group_by(
                    models.Account.id, models.Account.name)

    if accountid is not None:
        title = 'Account Details'
        account = models.Account.select().where(
            models.Account.id == accountid).get()
        opportunities = models.Opportunity.select().where(
            models.Opportunity.account_id == accountid)
        return render_template('account-detail.html',
                               account=account,
                               user=g.user._get_current_object(),
                               opportunities=opportunities,
                               title=title)

    print(acct_w_opps)
    return render_template('account.html',
                           accounts=accounts,
                           title=title,
                           acct_w_opps=acct_w_opps)
def tagsTop5():
    # Order by usage count (descending) so the most-used tags come first
    raw = (Tags.select(Tags.title, Tags.description).join(TagsMap).group_by(
        Tags.id).order_by(fn.COUNT(TagsMap.client_id).desc()).limit(5))
    result = []
    for q in raw:
        result.append({"title": q.title, "description": q.description})
    return json.jsonify(result)
def get_seen(cls, timediff):
    if timediff:
        timediff = datetime.utcnow() - timediff

    pokemon_count_query = (Pokemon.select(
        Pokemon.pokemon_id,
        fn.COUNT(Pokemon.pokemon_id).alias('count'),
        fn.MAX(Pokemon.disappear_time).alias('lastappeared')).where(
            Pokemon.disappear_time > timediff).group_by(
                Pokemon.pokemon_id).alias('counttable'))

    query = (Pokemon.select(
        Pokemon.pokemon_id, Pokemon.disappear_time, Pokemon.latitude,
        Pokemon.longitude, pokemon_count_query.c.count).join(
            pokemon_count_query,
            on=(Pokemon.pokemon_id == pokemon_count_query.c.pokemon_id)
        ).distinct().where(
            Pokemon.disappear_time == pokemon_count_query.c.lastappeared).dicts())

    # Performance: Disable the garbage collector prior to creating a
    # (potentially) large dict with append()
    gc.disable()

    pokemons = []
    total = 0
    for p in query:
        p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
        pokemons.append(p)
        total += p['count']

    # Re-enable the GC.
    gc.enable()

    return {'pokemon': pokemons, 'total': total}
def get_duplicates():
    from models import Entry
    from peewee import fn, SQL
    return (Entry.select(
        Entry.hash_str,
        fn.COUNT(Entry.hash_str).alias('occurrence')).group_by(
            Entry.hash_str).having(SQL('occurrence') > 1))
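# Hedged usage sketch (assumption, not from the original source): each row
# carries the selected hash plus the aliased aggregate, so duplicates can be
# reported directly off the query.
for entry in get_duplicates():
    print(entry.hash_str, entry.occurrence)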
def init_bar_overview(self) -> None:
    """
    Init overview table if not exists.
    """
    s: ModelSelect = (DbBarData.select(
        DbBarData.symbol,
        DbBarData.exchange,
        DbBarData.interval,
        fn.COUNT(DbBarData.id).alias("count")).group_by(
            DbBarData.symbol, DbBarData.exchange, DbBarData.interval))

    for data in s:
        overview = DbBarOverview()
        overview.symbol = data.symbol
        overview.exchange = data.exchange
        overview.interval = data.interval
        overview.count = data.count

        start_bar: DbBarData = (DbBarData.select().where(
            (DbBarData.symbol == data.symbol)
            & (DbBarData.exchange == data.exchange)
            & (DbBarData.interval == data.interval)).order_by(
                DbBarData.datetime.asc()).first())
        overview.start = start_bar.datetime

        end_bar: DbBarData = (DbBarData.select().where(
            (DbBarData.symbol == data.symbol)
            & (DbBarData.exchange == data.exchange)
            & (DbBarData.interval == data.interval)).order_by(
                DbBarData.datetime.desc()).first())
        overview.end = end_bar.datetime

        overview.save()
def events_summary():
    has_clip = request.args.get("has_clip", type=int)
    has_snapshot = request.args.get("has_snapshot", type=int)

    clauses = []

    if has_clip is not None:
        clauses.append((Event.has_clip == has_clip))

    if has_snapshot is not None:
        clauses.append((Event.has_snapshot == has_snapshot))

    if len(clauses) == 0:
        clauses.append((1 == 1))

    groups = (Event.select(
        Event.camera,
        Event.label,
        fn.strftime(
            "%Y-%m-%d",
            fn.datetime(Event.start_time, "unixepoch", "localtime")).alias("day"),
        Event.zones,
        fn.COUNT(Event.id).alias("count"),
    ).where(reduce(operator.and_, clauses)).group_by(
        Event.camera,
        Event.label,
        fn.strftime(
            "%Y-%m-%d",
            fn.datetime(Event.start_time, "unixepoch", "localtime")),
        Event.zones,
    ))

    return jsonify([e for e in groups.dicts()])
def get_seen(cls, timediff):
    if timediff:
        timediff = datetime.utcnow() - timediff

    pokemon_count_query = (Pokemon
                           .select(Pokemon.pokemon_id,
                                   fn.COUNT(Pokemon.pokemon_id).alias('count'),
                                   fn.MAX(Pokemon.disappear_time).alias('lastappeared'))
                           .where(Pokemon.disappear_time > timediff)
                           .group_by(Pokemon.pokemon_id)
                           .alias('counttable'))

    query = (Pokemon
             .select(Pokemon.pokemon_id,
                     Pokemon.disappear_time,
                     Pokemon.latitude,
                     Pokemon.longitude,
                     pokemon_count_query.c.count)
             .join(pokemon_count_query,
                   on=(Pokemon.pokemon_id == pokemon_count_query.c.pokemon_id))
             .where(Pokemon.disappear_time == pokemon_count_query.c.lastappeared)
             .dicts())

    pokemons = []
    total = 0
    for p in query:
        p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
        pokemons.append(p)
        total += p['count']

    return {'pokemon': pokemons, 'total': total}
def __process_request(raw_data: str):
    """
    Process the requested parameters and build an iterable ORM query.

    :param raw_data: requested GET parameters (joined with &)
    :return: iterable ORM query
    """
    data = __normalize_get_request(raw_data)
    recipes = Recipe.select().where(Recipe.active)

    # If a key is present and has a value, apply the corresponding filter.
    if 'name' in data and data['name']:
        recipes = recipes.where(Recipe.name.iregexp(data['name']))
    if 'hashtags' in data and data['hashtags']:
        recipes = recipes.where(Recipe.hashtags.iregexp(data['hashtags']))
    if 'user' in data and data['user']:
        recipes = recipes.join(User).where(User.nickname == data['user'])
    if 'dish_type' in data and data['dish_type']:
        recipes = recipes.join(
            DishType, on=(Recipe.dish_type == DishType.id)).where(
                DishType.dish_type_name == data['dish_type'])
    if 'image' in data and data['image']:
        recipes = recipes.where(Recipe.image.is_null(False))
    if 'order' in data:
        if data['order'] == 'date':
            recipes = recipes.order_by(Recipe.date)
        elif data['order'] == 'name':
            recipes = recipes.order_by(Recipe.name)
        elif data['order'] == 'id':
            recipes = recipes.order_by(Recipe.id)
        elif data['order'] == 'likes':
            likes_count = fn.COUNT(Likes.recipe).alias('count')
            recipes = recipes.select_extend(likes_count).\
                join(Likes, JOIN.LEFT_OUTER, on=(Likes.recipe == Recipe.id)).\
                group_by(Recipe.id).order_by(-likes_count)

    return recipes
def get_bar_data_statistics(self) -> List[Dict]:
    """"""
    s = (
        self.class_bar.select(
            self.class_bar.symbol,
            self.class_bar.exchange,
            self.class_bar.interval,
            fn.COUNT(self.class_bar.id).alias("count")
        ).group_by(
            self.class_bar.symbol,
            self.class_bar.exchange,
            self.class_bar.interval
        )
    )

    result = []
    for data in s:
        result.append({
            "symbol": data.symbol,
            "exchange": data.exchange,
            "interval": data.interval,
            "count": data.count
        })

    return result
def get_seen(timediff):
    if timediff:
        timediff = datetime.utcnow() - timedelta(hours=timediff)

    # Note: pokemon_id+0 forces SQL to ignore the pokemon_id index
    # and should use the disappear_time index and hopefully
    # improve performance
    query = (Pokemon.select(
        (Pokemon.pokemon_id + 0).alias('pokemon_id'),
        Pokemon.form,
        fn.COUNT((Pokemon.pokemon_id + 0)).alias('count'),
        fn.MAX(Pokemon.disappear_time).alias('disappear_time')).where(
            Pokemon.disappear_time > timediff).group_by(
                (Pokemon.pokemon_id + 0), Pokemon.form).dicts())

    # Performance: disable the garbage collector prior to creating a
    # (potentially) large dict with append().
    gc.disable()

    pokemon = []
    total = 0
    for p in query:
        pokemon.append(p)
        total += p['count']

    gc.enable()

    return {'pokemon': pokemon, 'total': total}
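# Hedged usage sketch (assumption, not from the original source): timediff is
# a number of hours, so this asks for sightings from the last day, grouped by
# pokemon_id and form as in the query above.
seen = get_seen(24)
print(seen['total'], 'sightings across', len(seen['pokemon']), 'id/form groups')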
def chapter_edit(req):
    namespace = req.matchdict.get('namespace')
    slug = req.matchdict.get('slug')
    chapter_slug = req.matchdict.get('chapter_slug')
    project = get_project(namespace, user=req.user)
    site = get_site(slug, project=project)
    if site.type != 'publication':
        raise HTTPNotFound
    publication = site.instance
    try:
        chapter = Chapter.select(
            Chapter,
            fn.COUNT(Article.id).alias('articles_count')).join(
                Article, JOIN.LEFT_OUTER).join(
                    Publication,
                    on=(Chapter.publication_id == publication.id)).where(
                        Chapter.slug == chapter_slug).group_by(
                            Chapter.id).order_by(Chapter.name.asc()).get()
    except Chapter.DoesNotExist:
        raise HTTPNotFound
    return dict(project=project, site=site, publication=publication,
                chapter=chapter)
def get_assets_with_comment_stats(id, page: int=1, limit: int=20, after=None):
    assets = Asset.select(
        Asset,
        fn.COUNT(PendingComment.id).alias('total_pending_comments')
    ).where(
        Asset.publication == id
    ).join(
        PendingComment, JOIN.LEFT_OUTER
    ).order_by(
        fn.COUNT(PendingComment.id).desc(),
        Asset.created.desc()
    ).group_by(
        Asset.id
    ).paginate(page, limit)

    asset_ids = [asset.id for asset in assets]

    assets_with_rejected_comments_count = RejectedComment.select(
        RejectedComment.asset_id,
        fn.COUNT(RejectedComment.id).alias('total_rejected_comments')
    ).where(
        RejectedComment.asset_id << asset_ids
    ).group_by(RejectedComment.asset_id)

    rejected_comment_counts = {
        asset.asset_id: asset.total_rejected_comments
        for asset in assets_with_rejected_comments_count
    }

    assets_with_comments_count = Comment.select(
        Comment.asset_id,
        fn.COUNT(Comment.id).alias('total_comments')
    ).where(
        Comment.asset_id << asset_ids
    ).group_by(Comment.asset_id)

    # Key by asset_id, mirroring the rejected-comments dict above
    comment_counts = {
        asset.asset_id: asset.total_comments
        for asset in assets_with_comments_count
    }

    return [
        {
            'comments_count': comment_counts.get(asset.id, 0),
            'pending_comments_count': asset.total_pending_comments,
            'rejected_comments_count': rejected_comment_counts.get(asset.id, 0),
            'commenting_closed': asset.commenting_closed,
            **asset.to_dict()
        }
        for asset in assets
    ]
def execute(self):
    counts = []
    for item_id in self.item_ids:
        count = Love.select(fn.COUNT(Love.id)).where(
            Love.item_id == item_id).scalar()
        counts.append(count)
    return self.mk_response(counts)
def get_count(vectorName: str):
    try:
        return Vector.select(fn.COUNT(Vector.vectorMax).alias("count")).where(
            Vector.vectorName % f"*{vectorName}*").get()
    except Vector.DoesNotExist:
        return None
    except Exception as ex:
        raise ex
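# Hedged usage sketch (hypothetical vector name, not from the original
# source): the returned row exposes the aliased aggregate as a `count`
# attribute; None signals that the lookup raised Vector.DoesNotExist.
row = get_count("velocity")
if row is not None:
    print(row.count)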
def get_system_count(rh_account):
    """Get count of nonstale, nonoptouted, evaluated user systems"""
    # pylint: disable=singleton-comparison
    return SystemPlatform.select(fn.COUNT(SystemPlatform.id).alias('count')) \
        .where((SystemPlatform.rh_account_id == rh_account) &
               ((SystemPlatform.last_evaluation.is_null(False)) |
                (SystemPlatform.advisor_evaluated.is_null(False))) &
               ((SystemPlatform.opt_out == False) &  # noqa: E712
                (SystemPlatform.stale == False) &  # noqa: E712
                (SystemPlatform.when_deleted.is_null(True)))) \
        .first().count
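# Hedged usage sketch (hypothetical account id, not from the original source):
# the helper already reads `.count` off the single aggregate row, so the
# caller gets a plain integer back.
active_system_count = get_system_count(rh_account=42)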