def query(self):
    # Per-table activity sample for one server, aggregated per timestamp:
    # index-usage ratio plus per-second rates of scan/tuple/maintenance
    # counters.
    query = powa_get_all_tbl_sample(bindparam("server"))
    query = query.alias()
    c = query.c

    def sum_per_sec(col):
        # Turn a summed counter into a per-second rate; the measure
        # interval is clamped to at least 1 second to avoid division by
        # zero on very close samples.
        ts = extract("epoch", greatest(c.mesure_interval, '1 second'))
        return (sum(col) / ts).label(col.name)

    from_clause = query
    cols = [
        c.srvid,
        extract("epoch", c.ts).label("ts"),
        # Share of index scans among all scans, in percent; 0 when there
        # were no scans at all (guards the division by zero).
        case([(sum(c.idx_scan + c.seq_scan) == 0, 0)],
             else_=cast(sum(c.idx_scan), Numeric) * 100 /
             sum(c.idx_scan + c.seq_scan)).label("idx_ratio"),
        sum_per_sec(c.idx_scan),
        sum_per_sec(c.seq_scan),
        sum_per_sec(c.n_tup_ins),
        sum_per_sec(c.n_tup_upd),
        sum_per_sec(c.n_tup_hot_upd),
        sum_per_sec(c.n_tup_del),
        sum_per_sec(c.vacuum_count),
        sum_per_sec(c.autovacuum_count),
        sum_per_sec(c.analyze_count),
        sum_per_sec(c.autoanalyze_count)
    ]
    return (select(cols).select_from(from_clause).where(
        c.datname == bindparam("database")).group_by(
        c.srvid, c.ts, c.mesure_interval).order_by(c.ts).params(samples=100))
def top_players(self):
    """Top players on this server by total playing time."""
    try:
        top_players_q = DBSession.query(
            fg.row_number().over(
                order_by=expr.desc(func.sum(PlayerGameStat.alivetime))).label("rank"),
            Player.player_id, Player.nick,
            func.sum(PlayerGameStat.alivetime).label("alivetime"))\
            .filter(Player.player_id == PlayerGameStat.player_id)\
            .filter(Game.game_id == PlayerGameStat.game_id)\
            .filter(Game.server_id == self.server_id)\
            .filter(Player.player_id > 2)\
            .filter(PlayerGameStat.create_dt >
                    (self.now - timedelta(days=self.lifetime)))\
            .order_by(expr.desc(func.sum(PlayerGameStat.alivetime)))\
            .group_by(Player.nick)\
            .group_by(Player.player_id)

        # self.last / self.limit implement pagination of the leaderboard
        if self.last:
            top_players_q = top_players_q.offset(self.last)
        if self.limit:
            top_players_q = top_players_q.limit(self.limit)

        top_players = top_players_q.all()
    except Exception as e:
        # any query failure surfaces as a 404 to the renderer
        log.debug(e)
        raise HTTPNotFound

    return top_players
def execute(self, message, user, params):
    # Reply with per-race demographics for an alliance known from intel:
    # per-member averages of value, score, size and xp.
    alliance = Alliance.load(params.group(1))
    if alliance is None:
        message.reply("No alliance matching '%s' found"%(params.group(1),))
        return
    Q = session.query(sum(Planet.value), sum(Planet.score),
                      sum(Planet.size), sum(Planet.xp),
                      count(), Planet.race)
    Q = Q.join(Planet.intel)
    Q = Q.filter(Planet.active == True)
    Q = Q.filter(Intel.alliance==alliance)
    Q = Q.group_by(Intel.alliance_id, Planet.race)
    Q = Q.order_by(asc(Planet.race))
    result = Q.all()
    if len(result) < 1:
        message.reply("No planets in intel match alliance %s"%(alliance.name,))
        return
    prev=[]
    for value, score, size, xp, members, race in result:
        # NOTE(review): value/members etc. is integer (truncating) division
        # on Python 2 — presumably intended for display; confirm.
        reply="%s %s Val(%s)" % (members,race,self.num2short(value/members),)
        reply+=" Score(%s)" % (self.num2short(score/members),)
        reply+=" Size(%s) XP(%s)" % (size/members,self.num2short(xp/members),)
        prev.append(reply)
    reply="Demographics for %s: "%(alliance.name,)+ ' | '.join(prev)
    message.reply(reply)
def get_top_scorers(self):
    """Top players by score. Shared by all renderers."""
    # BUG FIX: the configured window was immediately overwritten by a
    # hard-coded 120-day cutoff (apparent debug leftover); honour
    # self.lifetime like the sibling leaderboard methods do.
    cutoff = self.now - timedelta(days=self.lifetime)

    top_scorers_q = DBSession.query(
        fg.row_number().over(
            order_by=expr.desc(func.sum(PlayerGameStat.score))).label("rank"),
        Player.player_id, Player.nick,
        func.sum(PlayerGameStat.score).label("total_score"))\
        .filter(Player.player_id == PlayerGameStat.player_id)\
        .filter(Game.game_id == PlayerGameStat.game_id)\
        .filter(Game.map_id == self.map_id)\
        .filter(Player.player_id > 2)\
        .filter(PlayerGameStat.create_dt > cutoff)\
        .order_by(expr.desc(func.sum(PlayerGameStat.score)))\
        .group_by(Player.nick)\
        .group_by(Player.player_id)

    # self.last / self.limit implement pagination of the leaderboard
    if self.last:
        top_scorers_q = top_scorers_q.offset(self.last)
    if self.limit:
        top_scorers_q = top_scorers_q.limit(self.limit)

    top_scorers = top_scorers_q.all()

    return top_scorers
def query(self):
    # Background-writer activity per timestamp: raw checkpoint counts plus
    # per-second rates; buffer counters are scaled by block size via
    # mulblock().
    bs = block_size.c.block_size
    query = powa_get_bgwriter_sample(bindparam("server"))
    query = query.alias()
    c = query.c

    def sum_per_sec(col):
        # Per-second rate; interval clamped to >= 1 second to avoid
        # division by zero.
        ts = extract("epoch", greatest(c.mesure_interval, '1 second'))
        return (sum(col) / ts).label(col.name)

    from_clause = query
    cols = [
        c.srvid,
        extract("epoch", c.ts).label("ts"),
        sum(c.checkpoints_timed).label("checkpoints_timed"),
        sum(c.checkpoints_req).label("checkpoints_req"),
        sum_per_sec(c.checkpoint_write_time),
        sum_per_sec(c.checkpoint_sync_time),
        sum_per_sec(mulblock(c.buffers_checkpoint)),
        sum_per_sec(mulblock(c.buffers_clean)),
        sum_per_sec(c.maxwritten_clean),
        sum_per_sec(mulblock(c.buffers_backend)),
        sum_per_sec(c.buffers_backend_fsync),
        sum_per_sec(mulblock(c.buffers_alloc))
    ]
    return (select(cols).select_from(from_clause).group_by(
        c.srvid, c.ts, bs, c.mesure_interval).order_by(c.ts).params(samples=100))
def get_grouped_production(self, criterion, group_type):
    """Aggregate production totals grouped by state (or state+county).

    Returns a list of dicts keyed by the grouping columns plus the summed
    production, average_employees and labor_hours columns.
    """
    if group_type not in ['state', 'county']:
        raise Exception("Invalid group type: %s" % group_type)

    # Selected columns, result-dict keys and GROUP BY targets are built
    # in lockstep so zip() below lines them up correctly.
    values = [Activity.state]
    group = [Activity.state]
    names = ["state"]
    if group_type == 'county':
        values.append(Activity.county)
        group.append(Activity.county)
        names.append("county")

    for label, col in (("production", Activity.production),
                       ("average_employees", Activity.average_employees),
                       ("labor_hours", Activity.labor_hours)):
        values.append(sqlfunc.sum(col))
        names.append(label)

    rows = DBSession.query(*values).filter(*criterion).group_by(*group).all()
    return [dict(zip(names, row)) for row in rows]
def _story_build_summary_query():
    # Build a reusable 'story_summary' select: each story outer-joined to
    # its tasks, with a derived overall status and one per-status count
    # column.
    # first create a subquery for task statuses
    select_items = []
    select_items.append(Story)
    select_items.append(
        expr.case(
            # 'active' if any task is still open, else 'merged' if any
            # task merged, else 'invalid'.
            [(func.sum(Task.status.in_(
                ['todo', 'inprogress', 'review'])) > 0,
              'active'),
             ((func.sum(Task.status == 'merged')) > 0, 'merged')],
            else_='invalid'
        ).label('status')
    )
    # one integer column per task status, counting matching tasks
    for task_status in Task.TASK_STATUSES:
        select_items.append(expr.cast(
            func.sum(Task.status == task_status), Integer
        ).label(task_status))
    # placeholder column, filled in elsewhere — TODO confirm consumer
    select_items.append(expr.null().label('task_statuses'))
    result = select(select_items, None,
                    expr.Join(Story, Task,
                              onclause=Story.id == Task.story_id,
                              isouter=True)) \
        .group_by(Story.id) \
        .alias('story_summary')
    return result
def get_top_players_by_time(server_id):
    """Return the top players on a server by cumulative alive time.

    Result is a list of (player_id, nick, total_alivetime) tuples, sorted
    by alive time descending, limited to the leaderboard size.
    """
    try:
        leaderboard_lifetime = int(
            request.registry.settings['xonstat.leaderboard_lifetime'])
    except (KeyError, TypeError, ValueError):
        # setting missing or malformed: fall back to a 30 day window
        leaderboard_lifetime = 30

    leaderboard_count = 10
    recent_games_count = 20

    # player_id > 2 skips internal pseudo-players — TODO confirm ids
    top_players = DBSession.query(PlayerGameStat.player_id,
                                  func.sum(PlayerGameStat.alivetime)).\
        filter(Game.server_id == server_id).\
        filter(Game.create_dt > (datetime.utcnow() -
                                 timedelta(days=leaderboard_lifetime))).\
        filter(PlayerGameStat.game_id == Game.game_id).\
        filter(PlayerGameStat.player_id > 2).\
        order_by(expr.desc(func.sum(PlayerGameStat.alivetime))).\
        group_by(PlayerGameStat.player_id).\
        limit(leaderboard_count).all()

    # second query resolves nicks; keep totals keyed by player_id
    player_ids = []
    player_total = {}
    for (player_id, total) in top_players:
        player_ids.append(player_id)
        player_total[player_id] = total

    top_players = []
    players = DBSession.query(Player.player_id, Player.nick).filter(
        Player.player_id.in_(player_ids)).all()
    for (player_id, nick) in players:
        top_players.append((player_id, nick, player_total[player_id]))

    # in_() does not preserve order; re-sort by total descending.
    # (The original caught Exception only to assign a dead local and
    # re-raise, so the wrapper was removed.)
    top_players.sort(key=lambda tup: -tup[2])

    return top_players
def fleet_overview(self):
    # Summary of fleets per landing tick on a 'J' scan: eta relative to
    # the earliest scan tick, plus count/size of attacking and defending
    # fleets.
    if self.scantype not in ("J",):
        return
    from sqlalchemy.sql.functions import min, sum
    f = aliased(FleetScan)
    a = aliased(FleetScan)
    d = aliased(FleetScan)
    Q = session.query(
        f.landing_tick,
        f.landing_tick - min(Scan.tick),  # ticks until landing
        count(a.id),
        coalesce(sum(a.fleet_size), 0),
        count(d.id),
        coalesce(sum(d.fleet_size), 0),
    )
    Q = Q.join(f.scan)
    Q = Q.filter(f.scan == self)
    # Split the same fleet rows into attack/defend buckets through two
    # self outer-joins keyed on the fleet id and mission name.
    Q = Q.outerjoin((a, and_(a.id == f.id, a.mission.ilike("Attack"))))
    Q = Q.outerjoin((d, and_(d.id == f.id, d.mission.ilike("Defend"))))
    Q = Q.group_by(f.landing_tick)
    Q = Q.order_by(asc(f.landing_tick))
    return Q.all()
def get(self, database, query):
    # Render the per-query detail page: aggregate stats for one queryid in
    # one database over the requested time window.
    bs = block_size.c.block_size
    stmt = powa_getstatdata_detailed_db()
    stmt = stmt.where(
        (column("datname") == bindparam("database")) &
        (column("queryid") == bindparam("query")))
    stmt = stmt.alias()
    from_clause = outerjoin(powa_statements, stmt,
                            and_(powa_statements.c.queryid == stmt.c.queryid,
                                 powa_statements.c.dbid == stmt.c.dbid))
    c = stmt.c
    # block counters scaled to bytes via mulblock().
    # NOTE(review): wblk wraps shared_blks_hit (a read-side counter) yet
    # the pair is summed as "total_blks" — confirm the naming is intended.
    rblk = mulblock(sum(c.shared_blks_read).label("shared_blks_read"))
    wblk = mulblock(sum(c.shared_blks_hit).label("shared_blks_hit"))
    stmt = (select([
        column("query"),
        sum(c.calls).label("calls"),
        sum(c.runtime).label("runtime"),
        rblk,
        wblk,
        (rblk + wblk).label("total_blks")])
        .select_from(from_clause)
        .where(powa_statements.c.queryid == bindparam("query"))
        .group_by(column("query"), bs))
    value = self.execute(stmt, params={
        "query": query,
        "database": database,
        "from": self.get_argument("from"),
        "to": self.get_argument("to")
    })
    if value.rowcount < 1:
        self.render("xhr.html", content="No data")
        return
    self.render("database/query/detail.html", stats=value.first())
def get_top_scorers(self):
    """Top players by score. Shared by all renderers."""
    cutoff = self.now - timedelta(days=self.lifetime)
    try:
        # player_id > 2 skips internal pseudo-players — TODO confirm ids
        top_scorers_q = DBSession.query(
            fg.row_number().over(
                order_by=expr.desc(func.sum(PlayerGameStat.score))).label("rank"),
            Player.player_id, Player.nick,
            func.sum(PlayerGameStat.score).label("total_score"))\
            .filter(Player.player_id == PlayerGameStat.player_id)\
            .filter(Game.game_id == PlayerGameStat.game_id)\
            .filter(Game.map_id == self.map_id)\
            .filter(Player.player_id > 2)\
            .filter(PlayerGameStat.create_dt > cutoff)\
            .order_by(expr.desc(func.sum(PlayerGameStat.score)))\
            .group_by(Player.nick)\
            .group_by(Player.player_id)

        # self.last / self.limit implement pagination
        if self.last:
            top_scorers_q = top_scorers_q.offset(self.last)
        if self.limit:
            top_scorers_q = top_scorers_q.limit(self.limit)

        top_scorers = top_scorers_q.all()
        return top_scorers
    except Exception as e:
        # any query failure surfaces as a 404 to the renderer
        log.debug(e)
        raise HTTPNotFound
def get_notebook_contents(nid: int, start: int, end: int, uid=None):
    """Return one page ([start:end]) of a notebook's contents.

    Each entry carries author info plus up/down-vote counts and reward
    totals; when *uid* is given, that user's own tallies are included.
    Returns dbmsg("31") when the notebook does not exist.
    """
    session = Session()
    try:
        # BUG FIX: the original tested `if not notebook` on a Query
        # object, which is always truthy — the missing-notebook branch
        # never fired. Fetch an actual row.
        notebook = session.query(Notebook).filter_by(nid=nid).first()
        if not notebook:
            return dbmsg("31")
        contents = session.query(NotebookContent).filter_by(nid=nid).\
            order_by(NotebookContent.cid)[start:end]
        res = []
        for content in contents:
            content_info = {
                "cid": content.cid,
                "uid": content.uid,
                "time": str(content.time),
                "content": content.content,
                "imgs": content.imgs,
                "floor": content.floor,
                "ref": content.ref,
                "author": {
                    "id": [content.author.uid, content.author.rid],
                    "name": content.author.name,
                    "avatar": content.author.avatar
                }
            }
            # vote_type: 0 = upvote, 1 = downvote, 2 = reward
            upvote_count = session.query(VoteHistory).\
                filter_by(cid=content.cid, vote_type=0).count()
            downvote_count = session.query(VoteHistory).\
                filter_by(cid=content.cid, vote_type=1).count()
            reward_sum = session.query(f.sum(VoteHistory.amount)).\
                filter_by(cid=content.cid, vote_type=2).scalar()
            # scalar() returns a Decimal (or None when there are no
            # rewards); convert to int so it is JSON-serialisable.
            reward_sum = 0 if reward_sum is None else int(reward_sum)
            if uid:
                my_upvote = session.query(VoteHistory).\
                    filter_by(cid=content.cid, uid=uid, vote_type=0).count()
                my_downvote = session.query(VoteHistory).\
                    filter_by(cid=content.cid, vote_type=1, uid=uid).count()
                my_reward = session.query(f.sum(VoteHistory.amount)).\
                    filter_by(cid=content.cid, uid=uid, vote_type=2).scalar()
                my_reward = 0 if my_reward is None else int(my_reward)
            else:
                my_upvote = 0
                my_downvote = 0
                my_reward = 0
            content_info["upvote"] = [upvote_count, my_upvote]
            content_info["downvote"] = [downvote_count, my_downvote]
            content_info["reward"] = [reward_sum, my_reward]
            res.append(content_info)
        return dbmsg(data=res)
    except Exception:
        # best-effort: log the failure and fall through (returns None),
        # preserving the original error behaviour
        traceback.print_exc()
    finally:
        session.close()
def sum_votes(self, prop):
    """Tally a proposal's votes; returns (yes, no, veto).

    Yes/no are carebear-weighted sums (0 when nobody voted); veto is a
    plain row count.
    """
    def weighted_total(choice):
        # SUM() yields None when no rows match; coalesced by the caller.
        return session.query(sum(Vote.carebears)).filter_by(
            prop_id=prop.id, vote=choice).scalar()

    yes = weighted_total("yes")
    no = weighted_total("no")
    veto = session.query(Vote).filter_by(prop_id=prop.id,
                                         vote="veto").count()
    return yes or 0, no or 0, veto or 0
def restaurant_votes():
    # Today's restaurants ranked by total weighted vote, rendered as a
    # simple list page.
    results = db.session.query(RestaurantVote.restaurant_id,
                               sqlfunc.sum(RestaurantVote.weight)).filter(
        RestaurantVote.date == datetime.date.today()).group_by(
        RestaurantVote.restaurant_id).order_by(
        sqlfunc.sum(RestaurantVote.weight).desc()).all()
    logger.debug(str(results))
    # NOTE(review): one Restaurant.query.get() per result row (N+1
    # queries) — acceptable for a short daily list.
    restaurants = [dict(name=Restaurant.query.get(r[0]).name, votes=r[1])
                   for r in results]
    return flask.render_template("restaurant_votes.html",
                                 restaurants=restaurants)
def fetch_transactions(self, start_time, end_time):
    # Aggregate transactions per (site, code) within [start_time,
    # end_time), then attach the product and its instrument/fund
    # evaluations; returns a list of TransactionDto namedtuples.
    session = self.__session()
    try:
        transactions = session.query(
            Transaction.tx_site,
            Transaction.tx_code,
            functions.count(Transaction.tx_time).label('tx_size'),
            functions.min(Transaction.tx_time).label('tx_time_min'),
            functions.max(Transaction.tx_time).label('tx_time_max'),
            # net = signed sums; gross = sums of absolute values
            functions.sum(Transaction.tx_inst).label('tx_net_inst'),
            functions.sum(Transaction.tx_fund).label('tx_net_fund'),
            functions.sum(func.abs(
                Transaction.tx_inst)).label('tx_grs_inst'),
            functions.sum(func.abs(
                Transaction.tx_fund)).label('tx_grs_fund'),
        ).filter(Transaction.tx_time >= start_time,
                 Transaction.tx_time < end_time).group_by(
            Transaction.tx_site,
            Transaction.tx_code,
        ).subquery()

        # evaluations joined separately for the instrument and fund units
        inst = aliased(Evaluation, name='ev_inst')
        fund = aliased(Evaluation, name='ev_fund')
        results = session.query(transactions, Product, inst, fund).join(
            Product, and_(
                Product.pr_site == transactions.c.tx_site,
                Product.pr_code == transactions.c.tx_code,
            )).outerjoin(
            inst, and_(
                inst.ev_site == Product.pr_site,
                inst.ev_unit == Product.pr_inst,
            )).outerjoin(
            fund, and_(
                fund.ev_site == Product.pr_site,
                fund.ev_unit == Product.pr_fund,
            )).all()
    finally:
        session.close()

    dto = namedtuple(
        'TransactionDto',
        ('tx_site', 'tx_code', 'tx_size', 'tx_time_min', 'tx_time_max',
         'tx_net_inst', 'tx_net_fund', 'tx_grs_inst', 'tx_grs_fund',
         'product', 'ev_inst', 'ev_fund'))

    return [dto(*r) for r in results]
def main_index(request):
    """Front page data: player/server/map leaderboards and recent games.

    Each leaderboard is padded with '-' placeholder rows up to its fixed
    display size so the templates can render fixed-height tables.
    """
    leaderboard_count = 10
    recent_games_count = 32

    # top players by score
    top_players = DBSession.query(Player.player_id, Player.nick,
                                  func.sum(PlayerGameStat.score)).\
        filter(Player.player_id == PlayerGameStat.player_id).\
        filter(Player.player_id > 2).\
        order_by(expr.desc(func.sum(PlayerGameStat.score))).\
        group_by(Player.nick).\
        group_by(Player.player_id).all()[0:10]

    top_players = [(player_id, html_colors(nick), score) \
            for (player_id, nick, score) in top_players]

    for i in range(leaderboard_count-len(top_players)):
        top_players.append(('-', '-', '-'))

    # top servers by number of total players played
    top_servers = DBSession.query(Server.server_id, Server.name,
                                  func.count()).\
        filter(Game.server_id==Server.server_id).\
        order_by(expr.desc(func.count(Game.game_id))).\
        group_by(Server.server_id).\
        group_by(Server.name).all()[0:10]

    for i in range(leaderboard_count-len(top_servers)):
        top_servers.append(('-', '-', '-'))

    # top maps by total times played
    # BUG FIX: join maps to games on map_id; the original compared
    # Map.map_id to Game.game_id, producing a meaningless join.
    top_maps = DBSession.query(Map.map_id, Map.name,
                               func.count(Game.game_id)).\
        filter(Map.map_id==Game.map_id).\
        order_by(expr.desc(func.count(Game.game_id))).\
        group_by(Map.map_id).\
        group_by(Map.name).all()[0:10]

    for i in range(leaderboard_count-len(top_maps)):
        top_maps.append(('-', '-', '-'))

    recent_games = DBSession.query(Game, Server, Map).\
        filter(Game.server_id==Server.server_id).\
        filter(Game.map_id==Map.map_id).\
        order_by(expr.desc(Game.start_dt)).all()[0:recent_games_count]

    for i in range(recent_games_count-len(recent_games)):
        recent_games.append(('-', '-', '-'))

    return {'top_players':top_players,
            'top_servers':top_servers,
            'top_maps':top_maps,
            'recent_games':recent_games,
            }
def query(self):
    # Database-wide sampled statement stats per timestamp: average
    # runtime plus total read/hit block counters.
    bs = block_size.c.block_size
    query = powa_getstatdata_sample("db")
    query = query.alias()
    c = query.c
    return (select([
        extract("epoch", c.ts).label("ts"),
        # greatest(..., 1) guards against division by zero when no calls
        (sum(c.runtime) / greatest(sum(c.calls), 1)).label("avg_runtime"),
        total_read(c),
        total_hit(c)
    ]).where(c.calls != None).group_by(c.ts, bs).order_by(
        c.ts).params(samples=100))
def total_amount():
    """Return the combined balance + saving across all accounts.

    SUM() yields NULL (None) when the table is empty; both components are
    coalesced to 0.0 so the function always returns a number.
    """
    (balance,) = session \
        .query(sqlfunc.sum(tx.model.Account.balance)
               .label('balance_amount')).one_or_none()
    (saving,) = session \
        .query(sqlfunc.sum(tx.model.Account.saving)
               .label('saving_amount')).one_or_none()
    # idiom fix: `is None` instead of `== None` (PEP 8 E711)
    if balance is None:
        balance = 0.0
    if saving is None:
        saving = 0.0
    return balance + saving
def query(self):
    # Wait events per database, summed over the period, busiest first.
    inner_query = powa_getwaitdata_db().alias()
    c = inner_query.c
    from_clause = inner_query.join(powa_databases,
                                   c.dbid == powa_databases.c.oid)
    return (select([
        powa_databases.c.datname,
        c.event_type,
        c.event,
        sum(c.count).label("counts"),
    ]).select_from(from_clause).order_by(sum(c.count).desc()).group_by(
        powa_databases.c.datname, c.event_type, c.event))
def qualstat_getstatdata():
    # Aggregate pg_qualstats data per (qualid, queryid, quals): total
    # evaluation count, rows filtered, and their ratio.
    base_query = qualstat_base_statdata()
    return (select([
        column("qualid"),
        column("queryid").label("queryid"),
        func.to_json(column("quals")).label("quals"),
        sum(column("count")).label("count"),
        sum(column("nbfiltered")).label("nbfiltered"),
        # ratio of filtered rows; 0 when nothing was counted (guards the
        # division by zero)
        case([(sum(column("count")) == 0, 0)],
             else_=sum(column("nbfiltered")) /
             cast(sum(column("count")), Numeric)).label("filter_ratio")
    ]).select_from(base_query).group_by(column("qualid"),
                                        literal_column("queryid"),
                                        column("quals")))
def query(self):
    # Database-wide sampled statement stats per timestamp: average
    # runtime plus total read/hit block counters.
    bs = block_size.c.block_size
    query = powa_getstatdata_sample("db")
    query = query.alias()
    c = query.c
    return (select([
        extract("epoch", c.ts).label("ts"),
        # greatest(..., 1) guards against division by zero when no calls
        (sum(c.runtime) / greatest(sum(c.calls), 1)).label("avg_runtime"),
        total_read(c),
        total_hit(c)])
        .where(c.calls != None)
        .group_by(c.ts, bs)
        .order_by(c.ts)
        .params(samples=100))
def get_avg_run_time(user_id):
    """Return the user's average pace: total run time / total mileage.

    Returns the '00:00:00' placeholder when the user has no training
    logs or no recorded mileage.
    """
    total_mileage = db.session.query(
        functions.sum(TrainingLog.training_mileage)).filter(
        TrainingLog.user_id == user_id).first()[0]
    total_time = db.session.query(functions.sum(
        TrainingLog.training_run_time)).filter(
        TrainingLog.user_id == user_id).first()[0]
    # BUG FIX: the original only short-circuited when BOTH totals were
    # None, so a single missing total (or zero mileage) crashed the
    # division below with TypeError/ZeroDivisionError.
    if not total_mileage or total_time is None:
        return '00:00:00'
    return convert_deltatime_to_time(total_time / total_mileage)
def leaderboard_query(session, start_date, until_date):
    """
    This is, admittedly, a really ugly sql query. Query optimization has
    not been performed, but it shouldn't be anything more complicated
    than a few indices. Good luck.
    """
    # Instances with their type and user, plus a synthetic stop date
    # (end_date, or "now" for still-running instances).
    subq = session\
        .query(
            Instance,
            InstanceType,
            User,
            case([(Instance.end_date != None, Instance.end_date)],
                 else_=now()).label('stop_date'))\
        .join(Instance.user)\
        .join(Instance.type)\
        .subquery()

    # Seconds of overlap between [created_date, stop_date] and the
    # reporting window; 0 when the instance lies entirely outside it.
    uptime_column = case(
        [
            (subq.c.created_date > until_date, 0),
            (subq.c.stop_date < start_date, 0)
        ],
        else_=extract('epoch',
                      func.LEAST(subq.c.stop_date,
                                 cast(until_date, DateTime)) -
                      func.GREATEST(subq.c.created_date,
                                    cast(start_date, DateTime))))

    # BUG FIX: removed a leftover Python-2 `print subq.c` debug
    # statement, which is a SyntaxError under Python 3.
    subq2 = session.query(
        subq.c.user_id,
        # count only instances that actually had uptime in the window
        sum(case([(uptime_column == 0, 0)], else_=1)).label('instance_count'),
        sum(uptime_column).label('uptime'),
        sum(uptime_column * subq.c.cpu).label('cpu_seconds')
    ).group_by(subq.c.user_id).order_by(desc('cpu_seconds')).subquery()

    q = session.query(
        subq2.c.user_id,
        subq2.c.uptime,
        subq2.c.cpu_seconds,
        subq2.c.instance_count,
        User.username,
        User.is_staff,
        User.name
    ).join(User)

    return q
def query(self):
    # Fetch the base query for sample, and filter them on the database
    bs = block_size.c.block_size
    subquery = powa_getstatdata_sample("db")
    # Put the where clause inside the subquery
    subquery = subquery.where(column("datname") == bindparam("database"))
    query = subquery.alias()
    c = query.c
    return (select([
        to_epoch(c.ts),
        # greatest(..., 1.) guards against division by zero when no calls
        (sum(c.runtime) / greatest(sum(c.calls), 1.)).label("avg_runtime"),
        total_read(c),
        total_hit(c)
    ]).where(c.calls != None).group_by(c.ts, bs).order_by(
        c.ts).params(samples=100))
def galaxy(self, message, user, params):
    # Per-race demographics (value/score/size/xp totals and planet
    # counts) for one galaxy; formatting is delegated to execute().
    galaxy = Galaxy.load(*params.group(1,3))
    if galaxy is None:
        message.alert("No galaxy with coords %s:%s" % params.group(1,3))
        return
    Q = session.query(sum(Planet.value), sum(Planet.score),
                      sum(Planet.size), sum(Planet.xp),
                      count(), Planet.race)
    Q = Q.filter(Planet.galaxy == galaxy)
    Q = Q.filter(Planet.active == True)
    Q = Q.group_by(Planet.race)
    Q = Q.order_by(asc(Planet.race))
    result = Q.all()
    self.execute(message, "%s:%s" % (galaxy.x, galaxy.y,), result)
def get_ten_biggest_mums(self, rec):
    """Top ten cookie givers to *rec*, ranked by total cookies given."""
    totals = (session.query(User.name, sum(Cookie.howmany).label("gac"))
              .join(Cookie.giver)
              .filter(Cookie.receiver == rec)
              .group_by(User.name)
              .order_by(desc("gac")))
    # slicing the query limits it to the first ten rows
    return totals[:10]
def get_list(cls, **kw):
    """List clients with their total backup bytes, last successful job
    time and job count.

    Roughly:
      SELECT client.clientid, job_bytes, max_job FROM client
      LEFT JOIN (SELECT job.clientid, SUM(job.jobbytes) AS job_bytes FROM job
                 GROUP BY job.clientid) AS vsota ON vsota.clientid = client.clientid
      LEFT JOIN (SELECT job.clientid, MAX(job.schedtime) AS max_job FROM job
                 GROUP BY job.clientid) AS last_job ON last_job.clientid = client.clientid;
    """
    sum_stmt = Job.query\
        .with_entities(Job.clientid,
                       func.sum(Job.jobbytes).label('job_sumvolbytes'))\
        .group_by(Job.clientid)\
        .subquery('stmt_sub')
    # jobstatus 'T' = terminated successfully — TODO confirm code
    last_stmt = Job.query\
        .with_entities(Job.clientid,
                       func.max(Job.starttime).label('job_maxschedtime'))\
        .filter(Job.jobstatus == 'T')\
        .group_by(Job.clientid)\
        .subquery('stmt_max')
    objects = cls.query.with_entities(Client, 'job_sumvolbytes',
                                      'job_maxschedtime',
                                      func.count(Job.jobid).label('num_jobs'))\
        .outerjoin(Job, Client.clientid == Job.clientid)\
        .outerjoin(sum_stmt, sum_stmt.c.clientid == Client.clientid)\
        .outerjoin(last_stmt, last_stmt.c.clientid == Client.clientid)\
        .group_by(cls, 'job_sumvolbytes', 'job_maxschedtime')\
        .all()

    # ugly hack since sqlite returns strings for job_maxschedtime
    # TODO: report upstream to sqlalchemy
    if DBSession.bind.dialect.name == 'sqlite':
        def convert_datetime(row):
            if row.job_maxschedtime:
                row.job_maxschedtime = datetime.datetime.strptime(
                    row.job_maxschedtime, '%Y-%m-%d %H:%M:%S')
            return row
        # BUG FIX: use a list comprehension instead of bare map() — on
        # Python 3 map() is a lazy one-shot iterator, so callers that
        # iterate the result twice (or index it) would break.
        objects = [convert_datetime(row) for row in objects]
    return objects
def testing_function_2(query_module):
    """Run a SUM(Author.id) query through *query_module* and return the
    extracted rows rendered as a string."""
    models_module = sqlalchemy_models
    query = query_module.get_query(sum(models_module.Author.id))
    rows = query.all()
    # BUG FIX: materialise the mapping — on Python 3, str(map(...))
    # yields "<map object at ...>" instead of the row contents.
    result = [extract_row(row) for row in rows]
    return str(result)
def report_root_ids(connection, root_ids):
    # Per-(uid, gid) usage report over all children of the given roots:
    # inode count, total size, oldest last-seen time, with uid/gid
    # resolved to user/group names.
    rep = []
    subq = sa.alias(find_children(root_ids))
    q = (sa.select([
        model.paths.c.uid.label('uid'),
        model.paths.c.gid.label('gid'),
        safunc.count().label('inodes'),
        # coalesce: report 0 bytes rather than NULL for empty groups
        safunc.coalesce(safunc.sum(model.paths.c.size), 0).label('size'),
        safunc.min(model.paths.c.last_seen).label('last seen'),
    ]).select_from(
        model.paths.join(subq, subq.c.id == model.paths.c.id)).group_by(
        model.paths.c.uid, model.paths.c.gid).order_by(sa.desc('size')))
    for u in connection.execute(q):
        u = dict(u)
        # NOTE(review): getpwuid/getgrgid raise KeyError for ids unknown
        # to the local passwd/group databases — presumably all ids are
        # resolvable here; confirm.
        u['user'] = pwd.getpwuid(u['uid']).pw_name
        u['cn'] = pwd.getpwuid(u['uid']).pw_gecos
        u['group'] = grp.getgrgid(u['gid']).gr_name
        if u['last seen'] is not None:
            u['last seen'] = datetime.fromtimestamp(u['last seen'])
        rep.append(u)
    return rep
def dailyCaloriesRead(user_id):
    """Per-day calorie and distance totals for one user, oldest day first."""
    # the same day expression drives selection, grouping and ordering
    day = cast(Route.created_at, Date)
    query = (db.session.query(
                 day.label('Date'),
                 sum(Route.calories).label('total_calories'),
                 sum(Route.distance).label('total_distance'))
             .filter(Route.user_id == user_id)
             .group_by(day)
             .order_by(day))
    return query.all()
def query(self):
    # Fetch the base query for sample, and filter them on the database
    bs = block_size.c.block_size
    subquery = powa_getstatdata_sample("db")
    # Put the where clause inside the subquery
    subquery = subquery.where(column("datname") == bindparam("database"))
    query = subquery.alias()
    c = query.c
    return (select([
        to_epoch(c.ts),
        # greatest(..., 1.) guards against division by zero when no calls
        (sum(c.runtime) / greatest(sum(c.calls), 1.)).label("avg_runtime"),
        total_read(c),
        total_hit(c)])
        .where(c.calls != None)
        .group_by(c.ts, bs)
        .order_by(c.ts)
        .params(samples=100))
def total_by_codename(cls, codenames):
    """Return total lifetime pageviews (or clicks) for given codename(s)."""
    codenames = tup(codenames)
    # hour-interval rows are the most up-to-date totals available
    totals = (Session.query(cls.codename, sum(cls.pageview_count))
              .filter(cls.interval == "hour")
              .filter(cls.codename.in_(codenames))
              .group_by(cls.codename))
    return [row for row in totals]
def get_list(cls, **kw):
    # Storage entries with their volume count and total backed-up bytes.
    return cls.query.with_entities(Storage,
                                   func.count(Media.mediaid).label('num_volumes'),
                                   #func.count(Job.jobid).label('num_jobs'),
                                   #func.count(Client.clientid).label('num_clients'),
                                   func.sum(Media.volbytes).label('total_backup_size'))\
        .outerjoin('medias')\
        .group_by(cls)
def query(self):
    # Working from the waitdata detailed_db base query
    # Wait events per query for one database, joined to the statement
    # text, busiest first.
    inner_query = powa_getwaitdata_detailed_db(bindparam("server"))
    inner_query = inner_query.alias()
    c = inner_query.c
    ps = powa_statements
    columns = [c.srvid,
               c.queryid,
               ps.c.query,
               c.event_type,
               c.event,
               sum(c.count).label("counts")]
    from_clause = inner_query.join(
        ps,
        (ps.c.queryid == c.queryid) & (ps.c.dbid == c.dbid))
    return (select(columns).select_from(from_clause).where(
        c.datname == bindparam("database")).group_by(
        c.srvid, c.queryid, ps.c.query, c.event_type, c.event).order_by(
        sum(c.count).desc()))
def get_report_by_date(self, _from: int, to: int):
    """Sum ordered amounts per fruit name for orders dated in [_from, to].

    Returns a dict mapping fruit_name -> total amount.
    """
    # BUG FIX: Query.all() on a single column returns row tuples, not
    # scalars; unpack them before handing the list to in_(), which
    # otherwise relies on fragile implicit coercion.
    order_ids = [oid for (oid,) in self.db.session
                 .query(Order.id)
                 .filter(Order.date.between(_from, to)).all()]
    result = self.db.session\
        .query(OrderItem.fruit_name, sum(OrderItem.amount))\
        .filter(OrderItem.order_id.in_(order_ids))\
        .group_by(OrderItem.fruit_name).all()
    return dict(result)
def admin_lbs_per_agency():
    # Yearly report: pounds of food per agency, split by category and
    # month, separated into donor and recipient agencies.
    year = datetime.datetime.today().year
    if 'year' in request.args:
        try:
            year = int(request.args['year'])
        except:
            flash('Could not parse year.', 'error')
    agencies = {model.Agency.DONOR: {}, model.Agency.RECIPIENT: {}}
    # NOTE(review): agency_type is selected but not in the GROUP BY
    # (unlike the per-city report) — legal on MySQL, rejected by strict
    # SQL modes; confirm the target database.
    q = model.db.session.query(model.Agency.name,
                               model.Agency.agency_type,
                               extract('month', model.DriverDailyRoute.date),
                               sum(model.DriverStop.prepared),
                               sum(model.DriverStop.produce),
                               sum(model.DriverStop.dairy),
                               sum(model.DriverStop.raw_meat),
                               sum(model.DriverStop.perishable),
                               sum(model.DriverStop.dry_goods),
                               sum(model.DriverStop.bread)).\
        join(model.DriverStop, model.DriverDailyRoute).\
        filter(extract('year', model.DriverDailyRoute.date) == year).\
        group_by(model.Agency.name,
                 extract('month', model.DriverDailyRoute.date))
    for r in q:
        agency = r[0]
        # ?aggregate collapses "Name, branch" entries onto the base name
        if 'aggregate' in request.args:
            agency = agency.split(',')[0]
        type = r[1]
        month = int(r[2])
        if not agency in agencies[type]:
            # one 12-month bucket per category, plus a running TOTAL row
            agencies[type][agency] = \
                { 'Prepared': [0] * 12, 'Produce': [0] * 12,
                  'Dairy': [0] * 12, 'Raw Meat': [0] * 12,
                  'Perishable': [0] * 12, 'Dry Goods': [0] * 12,
                  'Bread': [0] * 12, 'TOTAL': [0] * 12, }
        # remaining columns are the category sums (may be NULL -> None)
        r = r[3:]
        agencies[type][agency]['Prepared'][month - 1] += r[0] if r[0] else 0
        agencies[type][agency]['Produce'][month - 1] += r[1] if r[1] else 0
        agencies[type][agency]['Dairy'][month - 1] += r[2] if r[2] else 0
        agencies[type][agency]['Raw Meat'][month - 1] += r[3] if r[3] else 0
        agencies[type][agency]['Perishable'][month - 1] += r[4] if r[4] else 0
        agencies[type][agency]['Dry Goods'][month - 1] += r[5] if r[5] else 0
        agencies[type][agency]['Bread'][month - 1] += r[6] if r[6] else 0
        agencies[type][agency]['TOTAL'][month - 1] += sum_results(r)
    #remove loop if category support is required for recipients
    for agency in agencies[model.Agency.RECIPIENT]:
        dlist = []
        for cat in agencies[model.Agency.RECIPIENT][agency]:
            if cat != 'TOTAL':
                dlist.append(cat)
        for cat in dlist:
            del agencies[model.Agency.RECIPIENT][agency][cat]
    # sort both sections alphabetically for display
    agencies[model.Agency.RECIPIENT] = [(k,
        agencies[model.Agency.RECIPIENT][k])
        for k in sorted(agencies[model.Agency.RECIPIENT])]
    agencies[model.Agency.DONOR] = [(k, agencies[model.Agency.DONOR][k])
        for k in sorted(agencies[model.Agency.DONOR])]
    return render_template('admin/lbs_per_agency.html', year=year,
                           agencies=agencies,
                           page_title='Pounds Per Agency (' + str(year) + ')')
def admin_lbs_per_city():
    # Yearly report: pounds of food per city and month, with donor and
    # recipient totals kept side by side.
    year = datetime.datetime.today().year
    if 'year' in request.args:
        try:
            year = int(request.args['year'])
        except:
            flash('Could not parse year.', 'error')
    cities = {}
    q = model.db.session.query(model.Agency.city,
                               model.Agency.agency_type,
                               extract('month', model.DriverDailyRoute.date),
                               sum(model.DriverStop.prepared),
                               sum(model.DriverStop.produce),
                               sum(model.DriverStop.dairy),
                               sum(model.DriverStop.raw_meat),
                               sum(model.DriverStop.perishable),
                               sum(model.DriverStop.dry_goods),
                               sum(model.DriverStop.bread)).\
        join(model.DriverStop, model.DriverDailyRoute).\
        filter(extract('year', model.DriverDailyRoute.date) == year).\
        group_by(model.Agency.city, model.Agency.agency_type,
                 extract('month', model.DriverDailyRoute.date))
    for r in q:
        city = r[0]
        atype = r[1]
        month = int(r[2])
        # remaining columns are the per-category sums
        r = r[3:]
        if not city in cities:
            # per month: [donor pounds, recipient pounds]
            cities[city] = [[0, 0] for _ in range(12)]
        if atype == model.Agency.DONOR:
            cities[city][month - 1][0] = sum_results(r)
        else:
            cities[city][month - 1][1] = sum_results(r)
    # alphabetical order for display
    cities = [(k, cities[k]) for k in sorted(cities)]
    return render_template('admin/lbs_per_city.html', year=year,
                           cities=cities,
                           page_title='Pounds Per City (' + str(year) + ')')
def qualstat_getstatdata(srvid, condition=None):
    # Aggregated pg_qualstats predicate statistics joined to the
    # statements they belong to, optionally restricted by *condition*.
    base_query = qualstat_base_statdata()
    if condition:
        base_query = base_query.where(condition)
    return (select([
        powa_statements.c.srvid,
        column("qualid"),
        powa_statements.c.queryid,
        column("query"),
        powa_statements.c.dbid,
        func.to_json(column("quals")).label("quals"),
        sum(column("execution_count")).label("execution_count"),
        sum(column("occurences")).label("occurences"),
        (sum(column("nbfiltered")) /
         sum(column("occurences"))).label("avg_filter"),
        # percentage of filtered rows; 0 when never executed (guards the
        # division by zero)
        case([(sum(column("execution_count")) == 0, 0)],
             else_=sum(column("nbfiltered")) /
             cast(sum(column("execution_count")), Numeric) * 100
             ).label("filter_ratio")
    ]).select_from(
        join(base_query, powa_statements,
             and_(powa_statements.c.queryid == literal_column("pqnh.queryid"),
                  powa_statements.c.srvid == literal_column("pqnh.srvid")),
             # NOTE(review): this fourth positional argument lands on
             # join()'s `isouter` parameter, not on the ON clause —
             # confirm this is intentional.
             powa_statements.c.srvid == column("srvid"))).group_by(
        powa_statements.c.srvid, column("qualid"),
        powa_statements.c.queryid, powa_statements.c.dbid,
        powa_statements.c.query, column("quals")))
def total_by_codename(cls, codenames):
    """Return total lifetime pageviews (or clicks) for given codename(s)"""
    names = tup(codenames)
    # hour-interval rows hold the most up-to-date counts
    query = Session.query(cls.codename, sum(cls.pageview_count))
    query = query.filter(cls.interval == "hour")
    query = query.filter(cls.codename.in_(names))
    query = query.group_by(cls.codename)
    return list(query)
def amount_purchased_tickets(self):
    """
    Retrieves the amount of purchased tickets in ``TicketOrder``s.

    Returns 0 when there are none, matching ``amount_ordered_tickets``.
    """
    # CONSISTENCY FIX: coalesce the SUM to 0 like the sibling
    # amount_ordered_tickets does — SUM() over no rows is NULL, so the
    # original returned None instead of 0.
    result = Session.query(coalesce(sum(TicketOrder.amount), 0))\
        .filter(TicketOrder.ticket_type_id==self.id)\
        .join(Order)\
        .filter(Order.status==PURCHASED).first()
    return result[0]
def getLeadingRestaurant(date): castBotVoteForRestaurant(date, prevent_revote=True) # ensure at least 1 vote restaurant_id = db.session.query(RestaurantVote.restaurant_id).filter( RestaurantVote.date == date).filter( Restaurant.active == True).group_by( RestaurantVote.restaurant_id).order_by( sqlfunc.sum(RestaurantVote.weight).desc()).first()[0] restaurant = Restaurant.query.get(restaurant_id) return restaurant
def amount_ordered_tickets(self):
    """
    Retrieve the amount of tickets in ``TicketOrder``s.
    """
    # SUM() over no rows is NULL; coalesce keeps the result numeric.
    amount_query = (
        Session.query(coalesce(sum(TicketOrder.amount), 0))
        .join(Order)
        .filter(~Order.status.in_([TIMEOUT, CANCELLED]))
        .filter(TicketOrder.ticket_type_id==self.id))
    row = amount_query.first()
    return row[0]
def _get_qty(self, itemID, type, date):
    """Total quantity of one movement ``type`` for an item on one day.

    ``type`` is 'received', 'shipped' or 'reserved'; anything else
    yields 0, as does a day with no matching rows.
    """
    # The three movement tables share the same column layout, so the
    # three duplicated query branches collapse into a model dispatch.
    models = {
        'received': ReceiveItem,
        'shipped': ShipItem,
        'reserved': ReserveItem,
    }
    model = models.get(type)
    if model is None:
        return 0
    # active == 0 selects live rows — TODO confirm flag convention
    total = DBSession.query(sum(model.qty)).filter(and_(
        model.active == 0,
        model.itemID == itemID,
        model.createTime >= date,
        model.createTime < date + timedelta(days=1))).first()[0]
    return total or 0
def team_score_list():
    # Scoreboard for all BLUE/RED teams: service-check points earned vs
    # maximum possible, inject scores, and points lost to captured flags.
    teams = Team.query.filter(Team.role.in_([Team.BLUE, Team.RED]))
    scoring_teams = []
    for team in teams:
        # earned = value of successful checks; maximum = value of all
        # checks run for this team's services
        temp = db.session.query(
            functions.sum(CheckResult.success * ServiceCheck.value),
            functions.sum(ServiceCheck.value)) \
            .select_from(
                join(CheckResult,
                     join(ServiceCheck, Service,
                          ServiceCheck.service_id == Service.id),
                     CheckResult.check_id == ServiceCheck.id))
        services = temp.filter(Service.team_id == team.id).first()
        earned = 0
        maximum = 0
        if services[0]:
            earned = services[0]
            maximum = services[1]
        # solve count and value per flag owned by this team
        flag_subquery = db.session.\
            query(functions.count(FlagDiscovery.id).label('solve_count'),
                  Flag.value).\
            select_from(join(Flag, FlagDiscovery,
                             Flag.id == FlagDiscovery.flag_id)).\
            filter(Flag.team_id == team.id).\
            group_by(Flag.id).\
            subquery('flag_subquery')
        # points lost = sum over flags of (solves * flag value)
        flags = db.session \
            .query(functions.sum(flag_subquery.c.solve_count *
                                 flag_subquery.c.value)).\
            first()
        flags = flags[0] if flags[0] else 0
        injects = score_injects(team)
        team.scores = {
            'services_earned': earned,
            'services_maximum': maximum,
            'injects_earned': injects,
            'flags_lost': flags
        }
        scoring_teams.append(team)
    return render_scoring_page('scoring/index.html', teams=scoring_teams)
def is_valid_vote(self, vote):
    """Check whether *vote* is acceptable for this token category.

    A vote is valid when its value lies within [0, maximum_per_idea] and
    the user's accumulated vote total in this category does not exceed
    ``total_number``.  Returns True/False.
    """
    if not (0 <= vote.vote_value <= self.maximum_per_idea):
        return False
    (total,) = db.query(functions.sum(TokenIdeaVote.vote_value)).filter(
        TokenIdeaVote.token_category_id == self.id,
        TokenIdeaVote.user_id == vote.user_id
    ).first()
    # SUM() is NULL (None) when the user has no votes yet; treat that as
    # 0 instead of comparing None with an int (TypeError on Python 3).
    if (total or 0) > self.total_number:
        return False
    return True
def listener_count(self):
    """Return the listener count of this radio."""
    # Imported locally to avoid clashing with module-level names.
    from sqlalchemy.sql.expression import func
    from sqlalchemy.sql.functions import sum

    db_session = Session.object_session(self)
    total_listeners = func.ifnull(sum(ProxyConnection.listener), 0)
    query = db_session.query(total_listeners).filter_by(user_id=self.user_id)
    return query.one()
def eta(self):
    """Return the summed rendering ETA of unrendered videos queued ahead
    of this one, or 0 when nothing is ahead.

    Per the filter expressions: a donator's video waits only behind
    earlier donator videos; a non-donator video waits behind every
    donator video plus earlier videos.
    """
    store = inspect(self).session
    attr = functions.sum(Video.rendering_eta())
    # Renamed from ``filter`` so the builtin is not shadowed.
    if self.donator:
        ahead = Video.donator & (Video.created_on <= self.created_on)
    else:
        ahead = Video.donator | (Video.created_on <= self.created_on)
    # ``== None`` is intentional: SQLAlchemy renders it as IS NULL.
    query = store.query(attr).filter(Video.rendered_on == None)
    # SUM over no rows is NULL/None; normalise to 0.
    return query.filter(ahead).one()[0] or 0
def get_damage_stats(player_id, weapon_cd, games):
    """
    Provides damage info for weapon_cd by player_id for the past N games.

    Returns a tuple ``(avg, dmgs)``: ``avg`` is the player's overall
    damage-per-hit with the weapon, and ``dmgs`` is a list of
    ``(game_id, damage_per_hit)`` pairs in chronological order (oldest
    first).  Falls back to ``(0.0, [])`` if the queries or the overall
    average cannot be computed.
    """
    try:
        raw_avg = (
            DBSession.query(func.sum(PlayerWeaponStat.actual),
                            func.sum(PlayerWeaponStat.hit))
            .filter(PlayerWeaponStat.player_id == player_id)
            .filter(PlayerWeaponStat.weapon_cd == weapon_cd)
            .one()
        )
        avg = round(float(raw_avg[0]) / raw_avg[1], 2)

        # Damage efficiency (hit, fired) numbers for the last $games
        # games; enumerated below into flot graph parameters.
        raw_dmgs = (
            DBSession.query(PlayerWeaponStat.game_id,
                            PlayerWeaponStat.actual,
                            PlayerWeaponStat.hit)
            .filter(PlayerWeaponStat.player_id == player_id)
            .filter(PlayerWeaponStat.weapon_cd == weapon_cd)
            .order_by(PlayerWeaponStat.game_id.desc())
            .limit(games)
            .all()
        )

        # Rows come out newest-first; flip into chronological order.
        raw_dmgs.reverse()

        dmgs = []
        for game_id, actual, hit in raw_dmgs:
            # Derive damage per hit; if nothing was hit (or a value is
            # NULL), record 0 for that game.
            try:
                dmg = round(float(actual) / hit, 2)
            except (ZeroDivisionError, TypeError):
                dmg = 0.0
            dmgs.append((game_id, dmg))
    except Exception:
        # Best-effort endpoint: any failure (no rows, zero hits overall)
        # yields empty stats rather than an error.
        dmgs = []
        avg = 0.0

    return (avg, dmgs)
def createReport(self):
    """Build the ledger balance report: one row per top-level subject
    with its debt, credit and remaining balance, restricted by the date
    options chosen in the dialog.

    Returns a dict with "data", "col-width" and "heading" keys, or None
    (implicitly) when the user supplied an inverted date range.
    """
    report_data = []
    remaining = 0
    # NOTE(review): ``remaining`` above is never used — candidate for removal.
    report_header = [_("Ledger name"), _("Debt"), _("Credit"), _("Remaining")]
    col_width = [31, 23, 23, 23]
    # One report row per top-level subject (parent_id == 0).
    query = config.db.session.query(Subject).select_from(Subject)
    result = query.filter(Subject.parent_id == 0).all()
    query1 = config.db.session.query(sum(Notebook.value))
    # Check the report parameters
    if self.builder.get_object("allcontent1").get_active() == True:
        # "All content": no date restriction, so bills are not joined.
        query1 = query1.select_from(
            outerjoin(Subject, Notebook, Subject.id == Notebook.subject_id))
    else:
        # Date-restricted: also join bills so Bill.date can be filtered.
        query1 = query1.select_from(
            outerjoin(outerjoin(Notebook, Subject,
                                Notebook.subject_id == Subject.id),
                      Bill, Notebook.bill_id == Bill.id))
        if self.builder.get_object("atdate1").get_active() == True:
            # Single-date report.
            date = self.date.getDateObject()
            query1 = query1.filter(Bill.date == date)
        else:
            if self.builder.get_object("betweendates1").get_active() == True:
                # Date-range report; reject inverted ranges with a modal
                # error dialog and abort.
                fdate = self.fdate.getDateObject()
                tdate = self.tdate.getDateObject()
                if tdate < fdate:
                    msgbox = gtk.MessageDialog(
                        self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_ERROR,
                        gtk.BUTTONS_OK,
                        _("Second date value shouldn't precede the first one."))
                    msgbox.set_title(_("Invalid date order"))
                    msgbox.run()
                    msgbox.destroy()
                    return
                query1 = query1.filter(Bill.date.between(fdate, tdate))
    for s in result:
        # Sum over the subject's whole subtree via its nested-set
        # (lft..rgt) range.  Negative notebook values are debts.
        res = query1.filter(and_(Subject.lft >= s.lft,
                                 Subject.lft <= s.rgt,
                                 Notebook.value < 0)).first()
        if res[0] == None:
            debt_sum = 0
        else:
            debt_sum = res[0]
        # Positive notebook values are credits.
        res = query1.filter(and_(Subject.lft >= s.lft,
                                 Subject.lft <= s.rgt,
                                 Notebook.value > 0)).first()
        if res[0] == None:
            credit_sum = 0
        else:
            credit_sum = res[0]
        # debt_sum is negative, so addition nets the balance; negative
        # remainders are shown parenthesized per accounting convention.
        remain = credit_sum + debt_sum
        if remain < 0:
            remain = "( " + utility.showNumber(-remain) + " )"
        else:
            remain = utility.showNumber(remain)
        report_data.append((s.name, utility.showNumber(-debt_sum),
                            utility.showNumber(credit_sum), remain))
    return {"data":report_data, "col-width":col_width ,"heading":report_header}
def intel_alliance(self, message, user, params):
    """Aggregate per-race intel stats (value/score/size/XP totals and
    planet counts) for the named alliance and delegate the formatted
    reply to self.execute()."""
    alliance = Alliance.load(params.group(1))
    if alliance is None:
        message.reply("No alliance matching '%s' found"%(params.group(1),))
        return
    query = (session.query(sum(Planet.value), sum(Planet.score),
                           sum(Planet.size), sum(Planet.xp),
                           count(), Planet.race)
             .join(Planet.intel)
             .filter(Planet.active == True)
             .filter(Intel.alliance==alliance)
             .group_by(Intel.alliance_id, Planet.race)
             .order_by(asc(Planet.race)))
    rows = query.all()
    if not rows:
        message.reply("No planets in intel match alliance %s"%(alliance.name,))
        return
    self.execute(message, alliance.name, rows)
def top_players_by_time_q(cutoff_days):
    """
    Query for the top players by the amount of time played during a date
    range. Games older than cutoff_days days old are ignored.
    """
    # Only games played within the last cutoff_days days count.
    now = datetime.utcnow()
    window_start = now - timedelta(days=cutoff_days)

    total_alivetime = func.sum(PlayerGameStat.alivetime)
    query = (DBSession.query(Player.player_id, Player.nick, total_alivetime)
             .filter(Player.player_id == PlayerGameStat.player_id)
             .filter(Player.player_id > 2)
             .filter(expr.between(PlayerGameStat.create_dt, window_start, now))
             .order_by(expr.desc(total_alivetime))
             .group_by(Player.nick)
             .group_by(Player.player_id))
    return query
def execute(self, message, user, params):
    """Reply with alliance statistics from intel: totals and per-member
    averages for value, score, size and XP — and, when the alliance has
    more than ``tag_count`` known members, the same figures for just its
    top ``tag_count`` planets by score (shown in parentheses)."""
    tag_count = PA.getint("numbers", "tag_count")
    alliance = Alliance.load(params.group(1))
    if alliance is None:
        message.reply("No alliance matching '%s' found"%(params.group(1),))
        return
    # Aggregate every active planet tagged to this alliance in intel.
    Q = session.query(sum(Planet.value), sum(Planet.score),
                      sum(Planet.size), sum(Planet.xp), count())
    Q = Q.join(Planet.intel)
    Q = Q.filter(Planet.active == True)
    Q = Q.filter(Intel.alliance==alliance)
    Q = Q.group_by(Intel.alliance_id)
    result = Q.first()
    if result is None:
        message.reply("No planets in intel match alliance %s"%(alliance.name,))
        return
    value, score, size, xp, members = result
    # With at most tag_count known members, the "top" subset would equal
    # the whole alliance — reply with the plain totals only.
    if members <= tag_count:
        reply="%s Members: %s/%s, Value: %s, Avg: %s," % (alliance.name,members,alliance.members,value,value//members)
        reply+=" Score: %s, Avg: %s," % (score,score//members)
        reply+=" Size: %s, Avg: %s, XP: %s, Avg: %s" % (size,size//members,xp,xp//members)
        message.reply(reply)
        return
    # Second pass: take the tag_count highest-scoring planets, then
    # aggregate that subset via from_self().
    Q = session.query(Planet.value, Planet.score, Planet.size,
                      Planet.xp, Intel.alliance_id)
    Q = Q.join(Planet.intel)
    Q = Q.filter(Planet.active == True)
    Q = Q.filter(Intel.alliance==alliance)
    Q = Q.order_by(desc(Planet.score))
    Q = Q.limit(tag_count)
    Q = Q.from_self(sum(Planet.value), sum(Planet.score),
                    sum(Planet.size), sum(Planet.xp), count())
    Q = Q.group_by(Intel.alliance_id)
    ts_result = Q.first()
    ts_value, ts_score, ts_size, ts_xp, ts_members = ts_result
    # Totals first, top-subset figures in parentheses.
    reply="%s Members: %s/%s (%s)" % (alliance.name,members,alliance.members,ts_members)
    reply+=", Value: %s (%s), Avg: %s (%s)" % (value,ts_value,value//members,ts_value//ts_members)
    reply+=", Score: %s (%s), Avg: %s (%s)" % (score,ts_score,score//members,ts_score//ts_members)
    reply+=", Size: %s (%s), Avg: %s (%s)" % (size,ts_size,size//members,ts_size//ts_members)
    reply+=", XP: %s (%s), Avg: %s (%s)" % (xp,ts_xp,xp//members,ts_xp//ts_members)
    message.reply(reply)
def catalogue_one(self, mount): mi = self.mount_info(mount) volume = self.get_volume(mi) print "Checking database: ", have_files = self.sesh.query(Inode).filter( Inode.volume_id == volume.id ).count() print "{0} file(s),".format(have_files), try: have_size = self.sesh.query( func.sum(Inode.alloc), ).filter( Inode.volume_id == volume.id ).group_by(Inode.volume_id).one()[0] except NoResultFound: have_size = 0 print "{0} MiB".format(int(have_size / (2 ** 20))) scanned = new = 0 def show_progress(): print "found {x}/{tot} file(s), ~{m}/{totm} MiB".format( x=(have_files + new), tot=(mi.vfss.f_files - mi.vfss.f_ffree), m=int(have_size / (2 ** 20)), totm=int(( (mi.vfss.f_bsize * (mi.vfss.f_blocks - mi.vfss.f_bfree)) ) / (2 ** 20)), ) for dirpath, dirnames, filenames in os.walk(mount): reldir = os.path.relpath(dirpath, mi.path) for filename in filenames: try: stat = os.lstat(os.path.join(dirpath, filename)) except OSError, e: print "Failed to stat {fn}: {e}".format( fn=filename, e=e, ) next fobj, inc = self.get_file(mi, stat) relfile = ( os.path.join(reldir, filename) if reldir != "." else filename ) fnobj = self.get_filename(fobj, relfile) scanned += 1 new += inc if inc > 0: have_size += mi.vfss.f_bsize * stat.st_blocks if scanned % 1000 == 0: show_progress() self.sesh.commit()
def is_valid_vote(self, vote):
    """Check whether *vote* is acceptable for this token category.

    A vote is valid when its value is non-negative, does not exceed
    ``maximum_per_idea`` (when that limit is positive), and the voter's
    accumulated non-tombstoned vote total in this category stays within
    ``total_number``.  Returns True/False.
    """
    if vote.vote_value < 0:
        return False
    # maximum_per_idea <= 0 means "no per-idea limit".
    if self.maximum_per_idea > 0 and vote.vote_value > self.maximum_per_idea:
        return False
    (total,) = self.db.query(functions.sum(TokenIdeaVote.vote_value)).filter(
        TokenIdeaVote.token_category_id == self.id,
        TokenIdeaVote.voter_id == vote.voter_id,
        TokenIdeaVote.tombstone_date == None  # noqa: E711
    ).first()
    # SUM() is NULL (None) when the voter has no live votes yet; treat
    # that as 0 instead of comparing None with an int (TypeError on
    # Python 3).
    if (total or 0) > self.total_number:
        return False
    return True