def get_leaderboard():
    claims = get_jwt_claims()
    subquery = (
        db.session.query(
            ValidatedAudio.created_by,
            func.count(ValidatedAudio.file_name).label('cnt')
        )
        .group_by(ValidatedAudio.created_by)
    ).subquery()
    user_ranks = (
        db.session.query(
            subquery,
            func.rank().over(order_by=desc('cnt')).label('user_rank')
        )
    ).all()
    user_ranks = [r._asdict() for r in user_ranks]
    for r in user_ranks:
        if r['created_by'] != claims['user']:
            r['created_by'] = abs(hash(r['created_by'])) % (10 ** 8)
    return jsonify(user_ranks)
def __get_result(model, flight_field, **kw):
    subq = DBSession \
        .query(getattr(Flight, flight_field),
               func.count('*').label('count'),
               func.sum(Flight.index_score).label('total')) \
        .group_by(getattr(Flight, flight_field)) \
        .outerjoin(Flight.model)

    if 'year' in kw:
        try:
            year = int(kw['year'])
        except:
            raise HTTPBadRequest

        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start) \
                   .filter(Flight.date_local <= year_end)

    subq = subq.subquery()

    result = DBSession \
        .query(model, subq.c.count, subq.c.total,
               over(func.rank(), order_by=desc('total')).label('rank')) \
        .join((subq, getattr(subq.c, flight_field) == model.id))

    result = result.order_by(desc('total'))
    return result
def get_comments(cls, ref, offset=0, limit=20, max_replies=3):
    comments = OrderedDict()
    q = Session.query(cls).filter(comments_table.c.ref == ref).filter(
        comments_table.c.thread_id.is_(None)).order_by(
        comments_table.c.created_at.desc()).offset(offset).limit(limit)
    for comment in q.all():
        comments[comment.id] = {
            'comment': comment,
            'replies': [],
        }
    if comments and max_replies:
        sub = Session.query(
            cls,
            func.rank().over(
                order_by=comments_table.c.created_at.asc(),
                partition_by=comments_table.c.thread_id,
            ).label('rank')).filter(
            comments_table.c.thread_id.in_(comments.keys())).filter(
            comments_table.c.thread_id == comments_table.c.reply_to
        ).filter(comments_table.c.ref == ref).subquery()
        q = Session.query(cls).select_entity_from(sub).filter(
            sub.c.rank <= max_replies)
        q = q.offset(offset).limit(limit)
        for reply in q.all():
            replies = comments[reply.thread_id]['replies']
            replies.append(reply)
    return [comment for _, comment in comments.iteritems()]
def get(self, project_uuid):
    """Get the most recent build for each environment of a project.

    Only environments for which builds have already been requested are
    considered. Meaning that environments that are part of a project but
    have never been built won't be part of results.
    """
    # Filter by project uuid. Use a window function to get the most
    # recently requested build for each environment.
    rank = (func.rank().over(
        partition_by="environment_uuid",
        order_by=desc("requested_time")).label("rank"))
    query = db.session.query(models.EnvironmentBuild)
    query = query.filter_by(project_uuid=project_uuid)
    query = query.add_column(rank)
    # Note: this works because rank is of type Label and rank == 1
    # will evaluate to sqlalchemy.sql.elements.BinaryExpression
    # since the equality operator is overloaded.
    query = query.from_self().filter(rank == 1)
    query = query.with_entities(models.EnvironmentBuild)
    env_builds = query.all()

    return {
        "environment_builds": [build.as_dict() for build in env_builds]
    }
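# A minimal, self-contained sketch of the same "latest row per group" pattern
# used above (rank() over a partition, then keep rank == 1), written without
# the legacy Query.from_self(), which is deprecated in SQLAlchemy 1.4+.
# The Build model, columns and in-memory database below are illustrative
# assumptions, not the models.EnvironmentBuild table from the snippet.
from sqlalchemy import Column, DateTime, Integer, String, create_engine, desc, func
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()


class Build(Base):  # hypothetical table, for illustration only
    __tablename__ = "builds"
    id = Column(Integer, primary_key=True)
    environment_uuid = Column(String)
    requested_time = Column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Rank builds within each environment, newest first.
    rank = (
        func.rank()
        .over(
            partition_by=Build.environment_uuid,
            order_by=desc(Build.requested_time),
        )
        .label("rank")
    )
    ranked = session.query(Build, rank).subquery()
    latest_build = aliased(Build, ranked)
    # Keep only the newest build (rank == 1) of every environment.
    latest_per_env = session.query(latest_build).filter(ranked.c.rank == 1).all()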
def get_standings(session, cup, page, page_size):
    query = session.query(
        Jumper,
        func.count(FinalStanding.id).label('participations'),
        func.min(FinalStanding.rank).label('top_rank'),
        func.max(FinalStanding.points).label('top_points'),
        func.sum(FinalStanding.i).label('i'),
        func.sum(FinalStanding.ii).label('ii'),
        func.sum(FinalStanding.iii).label('iii'),
        func.sum(FinalStanding.n).label('hill_participations'),
        func.sum(FinalStanding.points).label('total_jump_points'),
        func.sum(FinalStanding.cup_points).label('total_cup_points'),
        func.rank().over(
            order_by=desc(func.sum(FinalStanding.points))
            if cup.rank_method == 'JumpPoints'
            else (desc(func.sum(FinalStanding.cup_points)),
                  desc(func.sum(FinalStanding.points)))).label('rank')) \
        .join(FinalStanding) \
        .join(Tournament) \
        .join(CupDate) \
        .filter(CupDate.cup_id == cup.id) \
        .group_by(Jumper) \
        .order_by('rank')

    total_count = query.count()

    data = query \
        .offset((page - 1) * page_size) \
        .limit(page_size) \
        .all()

    return data, Pagination(page, page_size, total_count)
def _get_result(model, flight_field, year=None):
    subq = db.session \
        .query(getattr(Flight, flight_field),
               func.count('*').label('count'),
               func.sum(Flight.index_score).label('total')) \
        .group_by(getattr(Flight, flight_field)) \
        .outerjoin(Flight.model)

    if isinstance(year, int):
        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start) \
                   .filter(Flight.date_local <= year_end)

    subq = subq.subquery()

    result = db.session \
        .query(model, subq.c.count, subq.c.total,
               over(func.rank(), order_by=desc('total')).label('rank')) \
        .join((subq, getattr(subq.c, flight_field) == model.id))

    if model == User:
        result = result.options(subqueryload(model.club))

    result = result.order_by(desc('total'))
    return result
def _get_result(model, flight_field, year=None):
    subq = (
        db.session.query(
            getattr(Flight, flight_field),
            func.count("*").label("count"),
            func.sum(Flight.index_score).label("total"),
        )
        .group_by(getattr(Flight, flight_field))
        .outerjoin(Flight.model)
        .filter(Flight.is_rankable())
    )

    if isinstance(year, int):
        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start).filter(
            Flight.date_local <= year_end
        )

    subq = subq.subquery()

    result = db.session.query(
        model,
        subq.c.count,
        subq.c.total,
        over(func.rank(), order_by=desc("total")).label("rank"),
    ).join((subq, getattr(subq.c, flight_field) == model.id))

    if model == User:
        result = result.outerjoin(model.club)
        result = result.options(eagerload(model.club))

    return result
def _get_result_Flight(table, table_column, year=None):
    subq = (
        db.session.query(
            getattr(Flight, table_column),
            func.count("*").label("count"),
            func.sum(Flight.index_score).label("total"),
        )
        .group_by(getattr(Flight, table_column))
        .outerjoin(Flight.model)  # glider model
        .filter(Flight.is_rankable())
    )

    if isinstance(year, int):
        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start).filter(
            Flight.date_local <= year_end)

    subq = subq.subquery()

    result = db.session.query(
        table,
        subq.c.count,
        subq.c.total,
        over(func.rank(), order_by=desc("total")).label("rank"),
    ).join((subq, getattr(subq.c, table_column) == table.id))

    if table == User:
        result = result.outerjoin(table.club)
        result = result.options(eagerload(table.club))

    return result
def _get_result_Flight_User_byClub(year=None):
    '''Return the number of users in flights that are recorded under the
    club id. Rankable flight means public flight. Sort by number of flights
    this year.'''
    query = db.session.query(
        Flight.club_id,
        func.count(Flight.pilot_id.distinct()).label('users_count'),
        func.count(Flight.id).label('flights_count')) \
        .group_by(Flight.club_id)

    if isinstance(year, int):
        # if year is None, then get flights for all seasons
        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        query = query.filter(Flight.date_local >= year_start) \
                     .filter(Flight.date_local <= year_end)

    subq = query.subquery()

    result = db.session.query(
        Club,
        subq.c.flights_count,
        subq.c.users_count,
        over(func.rank(), order_by=desc("flights_count")).label("rank"),
    ).join((subq, getattr(subq.c, "club_id") == Club.id))

    return result
def test_funcfilter_windowing_orderby(self):
    # test filtered windowing:
    self.assert_compile(
        select([
            func.rank().filter(table1.c.name > 'foo').over(
                order_by=table1.c.name)
        ]),
        "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
        "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable")
def test_funcfilter_windowing_orderby_partitionby(self):
    self.assert_compile(
        select([
            func.rank().filter(table1.c.name > 'foo').over(
                order_by=table1.c.name,
                partition_by=['description'])
        ]),
        "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
        "OVER (PARTITION BY mytable.description ORDER BY mytable.name) "
        "AS anon_1 FROM mytable")
def get_rank(cls, id):
    subquery = db_session.query(
        Squads.id,
        func.rank().over(
            order_by=Squads.total_points.desc()).label('rank')).subquery()
    return db_session.query(subquery).filter(
        subquery.c.id == id).first().rank
def test_funcfilter_windowing_rows(self):
    self.assert_compile(
        select([
            func.rank().filter(table1.c.name > 'foo').over(
                rows=(1, 5),
                partition_by=['description'])
        ]),
        "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
        "OVER (PARTITION BY mytable.description ROWS BETWEEN :param_1 "
        "FOLLOWING AND :param_2 FOLLOWING) "
        "AS anon_1 FROM mytable")
def historical_rank(cls, session, student_id: int):
    rank_func = func.rank().\
        over(order_by=cls.gpa.desc()).\
        label("rank")
    students_ranked = session.query(cls, rank_func).\
        subquery()
    rank = session.query(students_ranked.c.rank).\
        filter(students_ranked.c.id == student_id)
    return rank.scalar()
def test_funcfilter_windowing_range(self):
    self.assert_compile(
        select(
            func.rank()
            .filter(table1.c.name > "foo")
            .over(range_=(1, 5), partition_by=["description"])
        ),
        "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
        "OVER (PARTITION BY mytable.description RANGE BETWEEN :param_1 "
        "FOLLOWING AND :param_2 FOLLOWING) "
        "AS anon_1 FROM mytable",
    )
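# A hedged, standalone sketch of what the compile tests above exercise:
# building a filtered, framed window expression with func.rank() and
# printing the SQL it renders to. The table and columns here are
# illustrative, not the `table1` fixture used by the tests.
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
scores = Table(
    "scores",
    metadata,
    Column("player", String),
    Column("points", Integer),
)

# rank() FILTER (WHERE points > 0)
#   OVER (PARTITION BY player ROWS BETWEEN 1 FOLLOWING AND 5 FOLLOWING)
expr = (
    func.rank()
    .filter(scores.c.points > 0)
    .over(rows=(1, 5), partition_by=scores.c.player)
)

# Render the statement to inspect the generated SQL.
print(select(expr))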
def class_rank(cls, session, grad_year: int, student_id: int):
    rank_func = func.rank().\
        over(order_by=cls.gpa.desc()).\
        label("rank")
    grad_class_ranked = session.query(cls, rank_func).\
        filter_by(grad_year=grad_year).\
        subquery()
    rank = session.query(grad_class_ranked.c.rank).\
        filter(grad_class_ranked.c.id == student_id)
    return rank.scalar()
def get_user_validated_audio_count(lang):
    claims = get_jwt_claims()
    lang_validated_count = (
        db.session.query(
            ValidatedAudio.file_name
        )
        .filter_by(expected_language_code=lang, created_by=claims['user'])
        .distinct()
        .count()
    )
    total_validated_count = (
        db.session.query(
            ValidatedAudio.file_name
        )
        .filter_by(created_by=claims['user'])
        .count()
    )
    user_rank = (
        db.session.query(
            func.count(ValidatedAudio.file_name).label("cnt")
        )
        .group_by(ValidatedAudio.created_by)
        .order_by(desc("cnt"))
    ).first()
    subquery = (
        db.session.query(
            ValidatedAudio.created_by,
            func.count(ValidatedAudio.file_name).label('cnt')
        )
        .group_by(ValidatedAudio.created_by)
    ).subquery()
    user_ranks = (
        db.session.query(
            subquery,
            func.rank().over(order_by=desc('cnt')).label('user_rank')
        )
    ).subquery()
    user_rank = db.session.query(user_ranks) \
        .filter_by(created_by=claims['user']).first()
    users_count = db.session.query(ValidatedAudio.created_by).distinct().count()
    return jsonify(
        {
            'languageValidatedCount': lang_validated_count,
            'totalValidatedCount': total_validated_count,
            'rank': user_rank._asdict()['user_rank'] if user_rank else 0,
            'usersCount': users_count
        }
    )
def get_last_measurements(use_cache=True):
    app.logger.info("---> get_last_measurements()")
    measurements_array = []

    # Check if a cache file exists to avoid querying the database
    # TODO: put cache file path in ONE place
    if os.path.isfile("/shared_data/last_measurements.json") and use_cache:
        with open("/shared_data/last_measurements.json") as measurements_file:
            app.logger.debug("Returning cache data")
            return json.load(measurements_file)

    app.logger.debug("Executing subquery")
    # Subquery to get all measurements ranked by station based on date.
    # Rank 1 is the most recent
    subquery = db.session.query(
        Measurement,
        func.rank().over(
            order_by=Measurement.date_created.desc(),
            partition_by=Measurement.station
        ).label('rnk')
    ).subquery()
    app.logger.debug("End of subquery execution")

    # Query to get the first ranked measurement (most recent), and then joined
    # with Station to extract readable name and coordinates
    app.logger.debug("Executing JOIN")
    query = db.session.query(subquery, Station) \
        .filter(subquery.c.rnk == 1) \
        .filter(Station.code == subquery.c.station)
    app.logger.debug("End of JOIN execution")

    # Result is a tuple with a Station object in the 11th position. Create a
    # full measurement object with the station name and coordinates
    for ms in query:
        full_ms = {}
        full_ms['name'] = ms[11].name
        full_ms['lat'] = float(ms[11].lat)
        full_ms['lon'] = float(ms[11].lon)
        # Convert date to Spanish Time
        full_ms['date_created'] = sp_date_str(ms.date_created)
        full_ms['current_temp'] = float(ms.current_temp)
        full_ms['current_hum'] = float(ms.current_hum)
        full_ms['current_pres'] = float(ms.current_pres)
        full_ms['wind_speed'] = float(ms.current_pres)
        full_ms['max_gust'] = float(ms.max_gust)
        full_ms['wind_direction'] = float(ms.wind_direction)
        full_ms['rainfall'] = float(ms.rainfall)
        full_ms['code'] = ms.station
        measurements_array.append(full_ms)

    app.logger.info("Returning %d measurements" % len(measurements_array))
    return measurements_array
def query_byTop(self, i):
    subquery = self.sqlite_session.query(
        zhihuTables.authorName,
        func.rank().over(
            order_by=zhihuTables.vote.desc()).label('rnk')).subquery()
    result = self.sqlite_session.query(subquery).filter(
        subquery.c.rnk == 1)
    # result = self.sqlite_session.query(zhihuTables.authorName, zhihuTables.vote, zhihuTables.content).func.rank(i).over(order_by='vote')
    # print(result)
    return result
def test_no_paren_fns(self):
    for fn, expected in [
        (func.uid(), "uid"),
        (func.UID(), "UID"),
        (func.sysdate(), "sysdate"),
        (func.row_number(), "row_number()"),
        (func.rank(), "rank()"),
        (func.now(), "CURRENT_TIMESTAMP"),
        (func.current_timestamp(), "CURRENT_TIMESTAMP"),
        (func.user(), "USER"),
    ]:
        self.assert_compile(fn, expected)
def view_result(embedding_id, testset_id):
    # Calculate the ranking of the embedding on the testset.
    partition = func.rank().over(partition_by=Result.testset_id,
                                 order_by=Result.accuracy.desc()).label('rank')
    sq = db.query(Result, partition).subquery()
    result = db.query(sq).filter(sq.c.embedding_id == embedding_id,
                                 sq.c.testset_id == testset_id).first()
    if not result:
        abort(404)
    return jsonify(data=serialize_result(result, summary=False))
def _get_last_classif_history(self, from_user_id: Optional[int],
                              but_not_from_user_id: Optional[int]) \
        -> List[HistoricalLastClassif]:
    """
    Query for last classification history on all objects of self, mixed with
    present state in order to have restore-able lines.
    """
    # Get the histo entries
    subqry = self.session.query(
        ObjectsClassifHisto,
        func.rank().over(
            partition_by=ObjectsClassifHisto.objid,
            order_by=ObjectsClassifHisto.classif_date.desc()).label("rnk"))
    if from_user_id:
        subqry = subqry.filter(
            ObjectsClassifHisto.classif_who == from_user_id)
    if but_not_from_user_id:
        subqry = subqry.filter(
            ObjectsClassifHisto.classif_who != but_not_from_user_id)
    subqry = subqry.filter(ObjectsClassifHisto.classif_type == "M")
    subqry = subqry.filter(
        ObjectsClassifHisto.objid == any_(self.object_ids)).subquery()

    # Also get some fields from ObjectHeader for referencing, info, and fallback
    qry = self.session.query(
        ObjectHeader.objid,
        ObjectHeader.classif_id,
        func.coalesce(subqry.c.classif_date, ObjectHeader.classif_auto_when),
        subqry.c.classif_type,
        func.coalesce(subqry.c.classif_id,
                      ObjectHeader.classif_auto_id).label("h_classif_id"),
        func.coalesce(
            subqry.c.classif_qual,
            case([(ObjectHeader.classif_auto_id.isnot(None), 'P')])),
        subqry.c.classif_who)
    qry = qry.join(subqry, ObjectHeader.objid == subqry.c.objid,
                   isouter=(from_user_id is None))
    if from_user_id is not None:
        # If taking history from a user, don't apply to the objects he/she
        # classified in last already.
        qry = qry.filter(ObjectHeader.classif_who != from_user_id)
        qry = qry.filter(subqry.c.rnk == 1)
    else:
        # Taking any history, including nothing, so emit blank history
        # (see isouter above)
        qry = qry.filter(ObjectHeader.objid == any_(self.object_ids))
        qry = qry.filter(or_(subqry.c.rnk == 1, subqry.c.rnk.is_(None)))

    logger.info("_get_last_classif_history qry:%s", str(qry))
    with CodeTimer("HISTORY for %d objs: " % len(self.object_ids), logger):
        ret = [HistoricalLastClassif(rec) for rec in qry.all()]
    logger.info("_get_last_classif_history qry: %d rows", len(ret))
    return ret
def get_failed_build_priority_query(self):
    rank = func.rank().over(partition_by=Package.id,
                            order_by=Build.id.desc()).label('rank')
    sub = self.db.query(Package.id.label('pkg_id'), Build.state, rank)\
        .outerjoin(Build, Package.id == Build.package_id)\
        .subquery()
    return self.db.query(sub.c.pkg_id,
                         literal_column(str(self.failed_priority))
                         .label('priority'))\
        .filter(((sub.c.rank == 1) & (sub.c.state == 5)) |
                ((sub.c.rank == 2) & (sub.c.state != 5)))\
        .group_by(sub.c.pkg_id)\
        .having(func.count(sub.c.pkg_id) == 2)
def get_failed_build_priority_query(self):
    rank = func.rank().over(partition_by=Package.id,
                            order_by=Build.id.desc()).label('rank')
    sub = self.db.query(Package.id.label('pkg_id'), Build.state,
                        Build.deps_resolved, rank)\
        .outerjoin(Build, Package.id == Build.package_id)\
        .subquery()
    failed_prio = get_config('priorities.failed_build_priority')
    return self.db.query(
        sub.c.pkg_id,
        literal_column(str(failed_prio)).label('priority')
    ).filter(
        ((sub.c.rank == 1) &
         ((sub.c.state == 5) | (sub.c.deps_resolved == False))) |
        ((sub.c.rank == 2) & (sub.c.state != 5))
    ).group_by(sub.c.pkg_id).having(func.count(sub.c.pkg_id) == 2)
def hypo():
    team1 = request.form['team1']
    team2 = request.form['team2']
    team3 = request.form['team3']
    team4 = request.form['team4']
    team5 = request.form['team5']
    gen = request.form['gender']
    dist = request.form['event']

    rightschools = db.session.query(
        models.athlete.athlete_name.label('athlete_name'),
        models.athlete.school_name.label('school_name'),
        models.athlete.event.label('event'),
        models.athlete.best_mark.label('best_mark'),
        models.athlete.gender.label('gender')).filter(
        and_(
            or_(models.athlete.school_name == team1,
                models.athlete.school_name == team2,
                models.athlete.school_name == team3,
                models.athlete.school_name == team4,
                models.athlete.school_name == team5),
            models.athlete.gender == gen,
            models.athlete.event == dist)).order_by(
        asc(models.athlete.best_mark)).subquery()

    sq1 = db.session.query(
        rightschools.c.athlete_name.label('athlete_name'),
        rightschools.c.school_name.label('school_name'),
        rightschools.c.best_mark.label('mark'),
        func.rank().over(order_by=rightschools.c.best_mark.asc(),
                         partition_by=rightschools.c.school_name).label(
            'tmsrnk')).subquery()

    sq2 = db.session.query(
        func.row_number().over(order_by=sq1.c.mark).label('points'),
        sq1.c.tmsrnk,
        sq1.c.athlete_name.label('athlete_name'),
        sq1.c.school_name.label('school_name'),
        sq1.c.mark.label('mark')).filter(
        sq1.c.tmsrnk < 8).order_by('mark').subquery()

    q2 = db.session.query(
        func.row_number().over(order_by=sq1.c.mark).label('points'),
        sq1.c.tmsrnk,
        sq1.c.athlete_name.label('athlete_name'),
        sq1.c.school_name.label('school_name'),
        sq1.c.mark.label('mark')).filter(sq1.c.tmsrnk < 8).order_by('mark')

    q3 = db.session.query(sq2.c.school_name.label('school_name'),
                          func.sum(sq2.c.points).label('points')).group_by(
        sq2.c.school_name).order_by('points')

    return render_template('hypo.html', hypo=q2, teams=q3)
def get_rt_resource_usage(param=None):
    if param is None:
        param = {'resource_name': '', 'usage_type_code': ''}
    subquery = db.session.query(
        ResourceUsage,
        func.rank().over(
            order_by=ResourceUsage.imported_at.desc(),
            partition_by=(ResourceUsage.resource_id,
                          ResourceUsage.usage_type_code)
        ).label('imported_rank')).subquery(name='resource_usage')
    query = db.session.query(subquery).filter(
        subquery.c.imported_rank == 1,
        subquery.c.resource_name.like('%{}%'.format(str(
            param['resource_name']))),
        subquery.c.usage_type_code.like('%{}%'.format(
            str(param['usage_type_code']))))
    return query
def __get_result(model, flight_field):
    subq = (
        DBSession.query(
            getattr(Flight, flight_field),
            func.count("*").label("count"),
            func.sum(Flight.index_score).label("total"),
        )
        .group_by(getattr(Flight, flight_field))
        .outerjoin(Flight.model)
        .subquery()
    )

    result = DBSession.query(
        model,
        subq.c.count,
        subq.c.total,
        over(func.rank(), order_by=desc("total")).label("rank"),
    ).join((subq, getattr(subq.c, flight_field) == model.id))

    result = result.order_by(desc("total"))
    return result
def get_k_to_last_subscriptions(
        self,
        # Which set of subscriptions we want, first to last (= the last one),
        # second to last (penultimate), third to last ...
        k: int = 1):
    k_to_Last_subscriptions_query = self.bq_session.query(
        *[column.label(column.name) for column in self.sub_data.columns],
        func.rank().over(
            partition_by=self.sub_data.c['user_id'],
            order_by=self.sub_data.c['start_time']
        ).label('reverse_order_rank'),
        self.sub_web_access(),
        func.safe_divide(
            (self.sub_data.c["amount"] - self.sub_data.c["additional_amount"]),
            self.sub_data.c['length']).label('daily_price')).filter(
        self.sub_data.c['start_time'].cast(DATE) <= self.max_date.date()
    ).subquery()

    k_to_Last_subscriptions_query = self.bq_session.query(*[
        column.label(column.name)
        for column in k_to_Last_subscriptions_query.columns
    ]).filter(
        k_to_Last_subscriptions_query.c['reverse_order_rank'] == k
    ).subquery()

    sub_prices_query = self.sub_prices_query()

    k_to_Last_subscriptions_query = self.bq_session.query(
        *[
            column.label(column.name)
            for column in k_to_Last_subscriptions_query.columns
        ],
        case([(sub_prices_query.c['average_price'] == 0, 0.0),
              (1 - k_to_Last_subscriptions_query.c['daily_price'] /
               sub_prices_query.c['average_price'] >= 0.2, 1.0)],
             else_=0.0).label('is_discount')).join(
        sub_prices_query,
        and_(
            k_to_Last_subscriptions_query.c['web_access_level'] ==
            sub_prices_query.c['web_access_level'],
            *[
                k_to_Last_subscriptions_query.c[column] ==
                sub_prices_query.c[column]
                for column in self.sub_desc_column_names
            ])).subquery()

    return k_to_Last_subscriptions_query
def list_results():
    embedding_id = request.args.get('embedding', None)
    testset_id = request.args.get('testset', None)

    # Calculate the ranking of the embedding on the testset.
    partition = func.rank().over(partition_by=Result.testset_id,
                                 order_by=Result.accuracy.desc()).label('rank')
    sq = db.query(Result, partition).subquery()

    query = db.query(sq)
    if embedding_id:
        query = query.filter(sq.c.embedding_id == int(embedding_id))
    if testset_id:
        query = query.filter(sq.c.testset_id == int(testset_id))

    results = query.all()
    data = [serialize_result(res) for res in results]
    meta = {'count': len(data)}
    return jsonify(data=data, meta=meta)
def index(self, **kw):
    subq = DBSession.query(
        TrackingFix,
        over(func.rank(),
             partition_by=TrackingFix.pilot_id,
             order_by=desc(TrackingFix.time)).label('rank')) \
        .outerjoin(TrackingFix.pilot) \
        .filter(TrackingFix.time >= datetime.utcnow() - timedelta(hours=6)) \
        .filter(TrackingFix.location_wkt != None) \
        .subquery()

    query = DBSession.query(TrackingFix) \
        .filter(TrackingFix.id == subq.c.id) \
        .filter(subq.c.rank == 1) \
        .order_by(desc(TrackingFix.time))

    tracks = []
    for track in query.all():
        airport = Airport.by_location(track.location, None)
        distance = airport.distance(track.location)
        tracks.append([track, airport, distance])

    return dict(tracks=tracks)
def _comic_archive_shared(parent_folder, request):
    folders = (
        session.query(GalleryFolder)
        .filter(GalleryFolder.parent == parent_folder)
        .options(joinedload(GalleryFolder.children))
        .order_by(GalleryFolder.order)
        .all())
    folder_ids = [folder.id for folder in folders]
    child_folder_ids = [
        child.id for folder in folders for child in folder.children
    ]

    # XXX remove this; currently used by _base.mako and the test below
    comic = session.query(Comic).order_by(Comic.id.asc()).first()

    if request.has_permission('queue', comic):
        queued_clause = True
    else:
        queued_clause = ~GalleryItem.is_queued

    recent_pages_by_folder = defaultdict(list)
    date_range_by_folder = dict()
    page_count_by_folder = dict()
    all_seen_items = set()
    if folder_ids:
        recent_page_subq = (
            session.query(
                GalleryItem,
                func.rank().over(
                    partition_by=GalleryItem.folder_id,
                    order_by=GalleryItem.order.asc(),
                ).label('rank_first'),
                func.rank().over(
                    partition_by=GalleryItem.folder_id,
                    order_by=GalleryItem.order.desc(),
                ).label('rank_last'),
            )
            .filter(GalleryItem.folder_id.in_(folder_ids))
            .filter(queued_clause)
            .subquery())
        GalleryItem_alias = aliased(GalleryItem, recent_page_subq)
        recent_page_q = (
            session.query(GalleryItem_alias)
            .filter(
                (recent_page_subq.c.rank_last <= FOLDER_PREVIEW_PAGE_COUNT) |
                (recent_page_subq.c.rank_first == 1))
            .order_by(GalleryItem_alias.order.asc()))
        for item in recent_page_q:
            recent_pages_by_folder[item.folder].append(item)
            all_seen_items.add(item)

        # Snag the start/end dates for each folder and the number of items in
        # each
        group_q = (
            session.query(
                GalleryFolder,
                func.min(GalleryItem.date_published),
                func.max(GalleryItem.date_published),
                func.count('*'))
            .join(GalleryFolder.pages)
            .filter(GalleryItem.folder_id.in_(folder_ids))
            .filter(queued_clause)
            .group_by(GalleryFolder.id))
        tz = XXX_HARDCODED_TIMEZONE
        for folder, mindate, maxdate, count in group_q:
            date_range_by_folder[folder] = (
                tz.normalize(mindate.astimezone(tz)),
                tz.normalize(maxdate.astimezone(tz)),
            )
            page_count_by_folder[folder] = count

    first_page_by_folder = {}
    if child_folder_ids:
        GalleryFolder_descendants = aliased(GalleryFolder)
        recent_page_q = (
            session.query(GalleryFolder, GalleryItem)
            .join((GalleryFolder_descendants, and_(
                GalleryFolder.left <= GalleryFolder_descendants.left,
                GalleryFolder.right >= GalleryFolder_descendants.right,
            )))
            .join((GalleryItem, GalleryFolder_descendants.pages))
            .filter(GalleryFolder.id.in_(child_folder_ids))
            .order_by(GalleryFolder.id, GalleryItem.order.asc())
            .distinct(GalleryFolder.id))
        first_page_by_folder = {
            folder: page for (folder, page) in recent_page_q
        }
        all_seen_items.update(first_page_by_folder.values())

    # Eagerload media rows for every item we've seen
    (
        session.query(GalleryItem)
        .options(joinedload(GalleryItem.media))
        .filter(GalleryItem.id.in_([item.id for item in all_seen_items]))
        .all())

    return dict(
        comic=comic,
        parent_folder=parent_folder,
        folders=folders,
        recent_pages_by_folder=recent_pages_by_folder,
        first_page_by_folder=first_page_by_folder,
        date_range_by_folder=date_range_by_folder,
        page_count_by_folder=page_count_by_folder,
    )
def get_user_medals(user: User) -> Tuple[List[Medal], List[Medal]]:
    unlocked = []
    locked = []

    #
    # REVIEWS
    #

    users_by_reviews = db.session.query(
            User.username, func.count(PackageReview.id).label("count")) \
        .select_from(User).join(PackageReview) \
        .group_by(User.username).order_by(text("count DESC")).all()

    try:
        review_boundary = users_by_reviews[math.floor(len(users_by_reviews) * 0.25)][1] + 1
    except IndexError:
        review_boundary = None
    users_by_reviews = [username for username, _ in users_by_reviews]

    review_idx = None
    review_percent = None
    try:
        review_idx = users_by_reviews.index(user.username)
        review_percent = round(100 * review_idx / len(users_by_reviews), 1)
    except ValueError:
        pass

    if review_percent is not None and review_percent < 25:
        if review_idx == 0:
            title = gettext(u"Most reviews")
            description = gettext(
                u"%(display_name)s has written the most reviews on ContentDB.",
                display_name=user.display_name)
        elif review_idx <= 2:
            if review_idx == 1:
                title = gettext(u"2nd most reviews")
            else:
                title = gettext(u"3rd most reviews")
            description = gettext(
                u"This puts %(display_name)s in the top %(perc)s%%",
                display_name=user.display_name, perc=review_percent)
        else:
            title = gettext(u"Top %(perc)s%% reviewer", perc=review_percent)
            description = gettext(
                u"Only %(place)d users have written more reviews.",
                place=review_idx)

        unlocked.append(Medal.make_unlocked(
            place_to_color(review_idx + 1), "fa-star-half-alt", title, description))
    else:
        description = gettext(u"Consider writing more reviews to get a medal.")
        if review_idx:
            description += " " + gettext(u"You are in place %(place)s.",
                                         place=review_idx + 1)
        locked.append(Medal.make_locked(
            description, (len(user.reviews), review_boundary)))

    #
    # TOP PACKAGES
    #

    all_package_ranks = db.session.query(
            Package.type, Package.author_id,
            func.rank().over(
                    order_by=db.desc(Package.score),
                    partition_by=Package.type) \
                .label("rank")).order_by(db.asc(text("rank"))) \
        .filter_by(state=PackageState.APPROVED).subquery()

    user_package_ranks = db.session.query(all_package_ranks) \
        .filter_by(author_id=user.id) \
        .filter(text("rank <= 30")) \
        .all()
    user_package_ranks = next(
        (x for x in user_package_ranks if x[0] == PackageType.MOD or x[2] <= 10),
        None)
    if user_package_ranks:
        top_rank = user_package_ranks[2]
        top_type = PackageType.coerce(user_package_ranks[0])
        if top_rank == 1:
            title = gettext(u"Top %(type)s", type=top_type.value.lower())
        else:
            title = gettext(u"Top %(group)d %(type)s", group=top_rank,
                            type=top_type.value.lower())
        if top_type == PackageType.MOD:
            icon = "fa-box"
        elif top_type == PackageType.GAME:
            icon = "fa-gamepad"
        else:
            icon = "fa-paint-brush"

        description = gettext(
            u"%(display_name)s has a %(type)s placed at #%(place)d.",
            display_name=user.display_name,
            type=top_type.value.lower(), place=top_rank)
        unlocked.append(
            Medal.make_unlocked(place_to_color(top_rank), icon, title, description))

    #
    # DOWNLOADS
    #

    total_downloads = db.session.query(func.sum(Package.downloads)) \
        .select_from(User) \
        .join(User.packages) \
        .filter(User.id == user.id, Package.state == PackageState.APPROVED).scalar()

    if total_downloads is None:
        pass
    elif total_downloads < 50000:
        description = gettext(
            u"Your packages have %(downloads)d downloads in total.",
            downloads=total_downloads)
        description += " " + gettext(u"First medal is at 50k.")
        locked.append(Medal.make_locked(description, (total_downloads, 50000)))
    else:
        if total_downloads >= 300000:
            place = 1
            title = gettext(u">300k downloads")
        elif total_downloads >= 100000:
            place = 2
            title = gettext(u">100k downloads")
        elif total_downloads >= 75000:
            place = 3
            title = gettext(u">75k downloads")
        else:
            place = 10
            title = gettext(u">50k downloads")

        description = gettext(
            u"Has received %(downloads)d downloads across all packages.",
            display_name=user.display_name, downloads=total_downloads)
        unlocked.append(Medal.make_unlocked(
            place_to_color(place), "fa-users", title, description))

    return unlocked, locked
def ranked(event_type):
    return func.rank().over(
        order_by=event_type.clock.desc(),
        partition_by=event_type.container_id,
    )
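# A hedged usage sketch for a helper like ranked(): wrap the ranked rows in a
# subquery and keep only rank 1 to get the newest event per container. The
# Event model, columns, session, and engine below are assumptions made for
# illustration only; they are not part of the snippet above.
from sqlalchemy import Column, Integer, String, create_engine, func
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()


class Event(Base):  # hypothetical event table
    __tablename__ = "events"
    id = Column(Integer, primary_key=True)
    container_id = Column(String)
    clock = Column(Integer)


def ranked(event_type):
    # Same shape as the helper above: newest event ranks first per container.
    return func.rank().over(
        order_by=event_type.clock.desc(),
        partition_by=event_type.container_id,
    )


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    sub = session.query(Event, ranked(Event).label("rnk")).subquery()
    latest_event = aliased(Event, sub)
    newest_per_container = session.query(latest_event).filter(sub.c.rnk == 1).all()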
def _env_images_that_can_be_deleted(
    project_uuid: Optional[str] = None,
    environment_uuid: Optional[str] = None,
    latest_can_be_removed: bool = False,
) -> List[models.EnvironmentImage]:
    """Gets environment images that are not in use and can be deleted.

    An env image to be considered as such needs to respect the following
    requirements:
    - not in use by a session, job or pipeline run
    - not the "latest" image for a given environment, because that would
      mean that a new session, job or pipeline run could use it
    - not already marked_for_removal
    - doesn't share the digest with an image that does not respect point 1
      or 2. This is because the digest is the real "identity" of the image,
      meaning that a name:tag actually points to a digest, and a digest
      might be pointed at by different image:tag combinations. The registry
      doesn't allow deletion by image:tag but by digest, hence the edge
      case. Essentially, we don't want to delete a digest that is in use.

    Given this, the following query checks for env images whose digest is
    not among the digests that are in use or are considered "latest". When
    it comes to edge cases (assuming the query is correct):
    - we don't run the risk of deleting a digest which is in use or latest
    - we don't run the risk of deleting a digest right after a new "latest"
      image that uses the same digest has been pushed, assuming the task
      performing the registry deletion runs in the env builds queue, which
      ensures that no build is taking place
    - on the nodes, the node-agent is responsible for issuing a deletion to
      the client; said deletion happens by name:tag, which avoids the risk
      of deleting unintended images.
    """
    if latest_can_be_removed and (project_uuid is None or environment_uuid is None):
        raise ValueError(
            "'latest_can_be_removed' requires project and env uuid to be passed."
        )

    # Digests in use.
    imgs_in_use_by_int_runs = (
        db.session.query(models.EnvironmentImage.digest)
        .join(
            models.PipelineRunInUseImage,
            models.EnvironmentImage.runs_using_image)
        .join(
            models.InteractivePipelineRun,
            models.InteractivePipelineRun.uuid ==
            models.PipelineRunInUseImage.run_uuid,
        )
        .filter(
            models.InteractivePipelineRun.status.in_(["PENDING", "STARTED"])))

    imgs_in_use_by_sessions = (
        db.session.query(models.EnvironmentImage.digest)
        .join(
            models.InteractiveSessionInUseImage,
            models.EnvironmentImage.sessions_using_image,
        )
        .join(
            models.InteractiveSession,
            # Combine both conditions with and_(); a plain Python `and`
            # would silently drop the first comparison.
            and_(
                models.InteractiveSession.project_uuid ==
                models.InteractiveSessionInUseImage.project_uuid,
                models.InteractiveSession.pipeline_uuid ==
                models.InteractiveSessionInUseImage.pipeline_uuid,
            ),
        ))

    imgs_in_use_by_jobs = (
        db.session.query(models.EnvironmentImage.digest)
        .join(models.JobInUseImage, models.EnvironmentImage.jobs_using_image)
        .join(models.Job, models.Job.uuid == models.JobInUseImage.job_uuid)
        .filter(
            models.Job.status.in_(["DRAFT", "PENDING", "STARTED", "PAUSED"])))

    imgs_in_use = imgs_in_use_by_int_runs.union(
        imgs_in_use_by_sessions, imgs_in_use_by_jobs).subquery()

    # Latest images digests.
    if not latest_can_be_removed:
        # Will produce a subquery like the following:
        # select project_uuid, environment_uuid, tag,
        #   rank() OVER (
        #       partition by project_uuid, environment_uuid
        #       order by tag desc
        #   ) rank
        # from environment_images
        # order by project_uuid, environment_uuid, tag desc;
        #
        # Which will give rank 1 to the "latest" image of every
        # environment, example:
        #  project_uuid | environment_uuid | tag | rank
        # --------------+------------------+-----+------
        #  13006c56-... | 18b59993-...     |   1 |    1
        #  13006c56-... | c56ab762-...     |   2 |    1
        #  13006c56-... | c56ab762-...     |   1 |    2
        #  13006c56-... | e0af758d-...     |   3 |    1
        #  13006c56-... | e0af758d-...     |   2 |    2
        #  13006c56-... | e0af758d-...     |   1 |    3
        tag_rank = (func.rank().over(
            partition_by=[
                models.EnvironmentImage.project_uuid,
                models.EnvironmentImage.environment_uuid,
            ],
            order_by=models.EnvironmentImage.tag.desc(),
        ).label("tag_rank"))
        latest_imgs = db.session.query(
            models.EnvironmentImage).add_column(tag_rank)
        latest_imgs = (
            latest_imgs.from_self()
            .filter(tag_rank == 1)
            .with_entities(models.EnvironmentImage.digest)
            .subquery())

    imgs_not_in_use = models.EnvironmentImage.query.with_for_update().filter(
        models.EnvironmentImage.digest.not_in(imgs_in_use),
        # Assume it's already been processed.
        models.EnvironmentImage.marked_for_removal.is_(False),
    )
    if not latest_can_be_removed:
        imgs_not_in_use = imgs_not_in_use.filter(
            models.EnvironmentImage.digest.not_in(latest_imgs),
        )
    if project_uuid:
        imgs_not_in_use = imgs_not_in_use.filter(
            models.EnvironmentImage.project_uuid == project_uuid)
    if environment_uuid:
        imgs_not_in_use = imgs_not_in_use.filter(
            models.EnvironmentImage.environment_uuid == environment_uuid)

    return imgs_not_in_use.all()