def build_all_sessions_query(self, events: QuerySet, _date_gte=Q()) -> Tuple[Query, QueryParams]:
    """Build a raw SQL query that groups events into sessions.

    Each event is annotated (via window functions) with the previous
    event/timestamp for the same ``distinct_id``; the compiled queryset SQL
    is then wrapped so a new session starts whenever the gap between
    consecutive events is >= 30 minutes, or there is no previous event.

    Returns the SQL string and its bind parameters.
    """
    sessions = (events.filter(_date_gte).annotate(
        previous_timestamp=Window(
            expression=Lag("timestamp", default=None),
            partition_by=F("distinct_id"),
            order_by=F("timestamp").asc(),
        )).annotate(previous_event=Window(
            expression=Lag("event", default=None),
            partition_by=F("distinct_id"),
            order_by=F("timestamp").asc(),
        )))
    # Compile the ORM queryset so it can be embedded as a raw subquery.
    sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
    # new_session flags a >= 30-minute gap (or first event); the running
    # SUMs turn those flags into a global and a per-user session counter.
    all_sessions = "\
    SELECT *,\
        SUM(new_session) OVER (ORDER BY distinct_id, timestamp) AS global_session_id,\
        SUM(new_session) OVER (PARTITION BY distinct_id ORDER BY timestamp) AS user_session_id\
        FROM (SELECT id, team_id, distinct_id, event, elements_hash, timestamp, properties, CASE WHEN EXTRACT('EPOCH' FROM (timestamp - previous_timestamp)) >= (60 * 30)\
            OR previous_timestamp IS NULL \
            THEN 1 ELSE 0 END AS new_session \
            FROM ({}) AS inner_sessions\
        ) AS outer_sessions".format(sessions_sql)
    return all_sessions, sessions_sql_params
def calculate_sessions(
    self,
    events: QuerySet,
    session_type: Optional[str],
    date_filter: Dict[str, datetime],
    team: Team,
    request: request.Request,
) -> List[Dict[str, Any]]:
    """Compute session data: average ('avg'), distribution ('dist'), or a
    session list (any other / None ``session_type``).

    Events are grouped into sessions per ``distinct_id``; a new session
    starts after a gap of >= 30 minutes or at a user's first event.
    """
    # format date filter for session view
    _date_gte = Q()
    if session_type is None:
        # Session-list view: restrict to a single day (the requested
        # date_from, or today) — presumably to keep the query fast.
        if request.GET.get("date_from", None):
            _date_gte = Q(
                timestamp__gte=date_filter["timestamp__gte"],
                timestamp__lte=date_filter["timestamp__gte"] + relativedelta(days=1),
            )
        else:
            dt = datetime.now()
            dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
            _date_gte = Q(timestamp__gte=dt, timestamp__lte=dt + relativedelta(days=1))
    # Annotate each event with the previous event/timestamp per distinct_id.
    sessions = (events.filter(_date_gte).annotate(
        previous_timestamp=Window(
            expression=Lag("timestamp", default=None),
            partition_by=F("distinct_id"),
            order_by=F("timestamp").asc(),
        )).annotate(previous_event=Window(
            expression=Lag("event", default=None),
            partition_by=F("distinct_id"),
            order_by=F("timestamp").asc(),
        )))
    sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
    # Wrap the compiled SQL: flag session breaks, then running-sum the flags
    # into global and per-user session ids.
    all_sessions = "\
    SELECT *,\
        SUM(new_session) OVER (ORDER BY distinct_id, timestamp) AS global_session_id,\
        SUM(new_session) OVER (PARTITION BY distinct_id ORDER BY timestamp) AS user_session_id\
        FROM (SELECT id, distinct_id, event, elements_hash, timestamp, properties, CASE WHEN EXTRACT('EPOCH' FROM (timestamp - previous_timestamp)) >= (60 * 30)\
            OR previous_timestamp IS NULL \
            THEN 1 ELSE 0 END AS new_session \
            FROM ({}) AS inner_sessions\
        ) AS outer_sessions".format(sessions_sql)
    result: List = []
    interval = request.GET.get("interval", None)
    if session_type == "avg":
        result = self._session_avg(all_sessions, sessions_sql_params, date_filter, interval)
    elif session_type == "dist":
        result = self._session_dist(all_sessions, sessions_sql_params)
    else:
        result = self._session_list(all_sessions, sessions_sql_params, team, request)
    return result
def calculate_sessions(self, events: QuerySet, session_type: Optional[str], filter: Filter,
                       team: Team, offset: int) -> List[Dict[str, Any]]:
    """Compute session data: average ('avg'), distribution ('dist'), or a
    paginated session list (``session_type`` None / other).

    Sessions are delimited per ``distinct_id`` by gaps of >= 30 minutes.
    """
    # format date filter for session view
    _date_gte = Q()
    if session_type is None:
        # if _date_from is not explicitely set we only want to get the last day worth of data
        # otherwise the query is very slow
        if filter._date_from and filter.date_to:
            _date_gte = Q(
                timestamp__gte=filter.date_from,
                timestamp__lte=filter.date_to + relativedelta(days=1),
            )
        else:
            dt = now()
            dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
            _date_gte = Q(timestamp__gte=dt, timestamp__lte=dt + relativedelta(days=1))
    else:
        # Aggregated views default date_from to the team's earliest event.
        if not filter.date_from:
            filter._date_from = (Event.objects.filter(
                team_id=team).order_by("timestamp")[0].timestamp.replace(
                    hour=0, minute=0, second=0, microsecond=0).isoformat())
    # Annotate each event with the previous event/timestamp per distinct_id.
    sessions = (events.filter(_date_gte).annotate(
        previous_timestamp=Window(
            expression=Lag("timestamp", default=None),
            partition_by=F("distinct_id"),
            order_by=F("timestamp").asc(),
        )).annotate(previous_event=Window(
            expression=Lag("event", default=None),
            partition_by=F("distinct_id"),
            order_by=F("timestamp").asc(),
        )))
    sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
    # Wrap the compiled SQL: flag >= 30-minute session breaks and turn them
    # into running global / per-user session counters.
    all_sessions = "\
    SELECT *,\
        SUM(new_session) OVER (ORDER BY distinct_id, timestamp) AS global_session_id,\
        SUM(new_session) OVER (PARTITION BY distinct_id ORDER BY timestamp) AS user_session_id\
        FROM (SELECT id, distinct_id, event, elements_hash, timestamp, properties, CASE WHEN EXTRACT('EPOCH' FROM (timestamp - previous_timestamp)) >= (60 * 30)\
            OR previous_timestamp IS NULL \
            THEN 1 ELSE 0 END AS new_session \
            FROM ({}) AS inner_sessions\
        ) AS outer_sessions".format(sessions_sql)
    result: List = []
    if session_type == "avg":
        result = self._session_avg(all_sessions, sessions_sql_params, filter)
    elif session_type == "dist":
        result = self._session_dist(all_sessions, sessions_sql_params)
    else:
        result = self._session_list(all_sessions, sessions_sql_params, team, filter,
                                    offset)
    return result
def calculate_sessions(self, events: QuerySet, session_type: Optional[str],
                       date_filter: Dict[str, datetime], team: Team,
                       request: request.Request) -> List[Dict[str, Any]]:
    """Compute session data: average ('avg'), distribution ('dist'), or a
    session list (``session_type`` None / other). Returns [] when there are
    no events at all.
    """
    if not events:
        return []
    # format date filter for session view
    _date_gte = Q()
    if session_type is None:
        # Session-list view: restrict to one day — either the requested
        # date_from or the day of the most recent event.
        if request.GET.get('date_from', None):
            _date_gte = Q(timestamp__gte=date_filter['timestamp__gte'],
                          timestamp__lte=date_filter['timestamp__gte'] + relativedelta(days=1))
        else:
            dt = events.order_by('-timestamp').values(
                'timestamp')[0]['timestamp']
            if dt:
                dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
                _date_gte = Q(timestamp__gte=dt,
                              timestamp__lte=dt + relativedelta(days=1))
    # Annotate each event with the previous event/timestamp per distinct_id.
    sessions = events.filter(_date_gte)\
        .annotate(previous_timestamp=Window(
            expression=Lag('timestamp', default=None),
            partition_by=F('distinct_id'),
            order_by=F('timestamp').asc()
        ))\
        .annotate(previous_event=Window(
            expression=Lag('event', default=None),
            partition_by=F('distinct_id'),
            order_by=F('timestamp').asc()
        ))
    sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
    # Wrap the compiled SQL: flag >= 30-minute session breaks and turn them
    # into running global / per-user session counters.
    all_sessions = '\
    SELECT *,\
        SUM(new_session) OVER (ORDER BY distinct_id, timestamp) AS global_session_id,\
        SUM(new_session) OVER (PARTITION BY distinct_id ORDER BY timestamp) AS user_session_id\
        FROM (SELECT id, distinct_id, event, elements_hash, timestamp, properties, CASE WHEN EXTRACT(\'EPOCH\' FROM (timestamp - previous_timestamp)) >= (60 * 30)\
            OR previous_timestamp IS NULL \
            THEN 1 ELSE 0 END AS new_session \
            FROM ({}) AS inner_sessions\
        ) AS outer_sessions'.format(sessions_sql)
    result: List = []
    if session_type == 'avg':
        result = self._session_avg(all_sessions, sessions_sql_params, date_filter)
    elif session_type == 'dist':
        result = self._session_dist(all_sessions, sessions_sql_params)
    else:
        result = self._session_list(all_sessions, sessions_sql_params, team,
                                    date_filter, request)
    return result
def get_tokens_usage_grouped(self) -> Optional[List[Dict[str, Any]]]:
    """
    :return: List of Dict 'gas_token', 'total', 'number', 'percentage'
    """
    # Transactions for one gas token on one day.
    per_token_per_day = Window(expression=Count("*"),
                               partition_by=[F("gas_token"), F("date")])
    # All transactions on that day — the percentage denominator.
    per_day = Window(expression=Count("*"), partition_by=[F("date")])
    annotated = SafeMultisigTx.objects.annotate(
        date=TruncDate("created")
    ).annotate(
        number=per_token_per_day,
        percentage=100.0 * per_token_per_day / per_day,
    )
    return (annotated.values("date", "gas_token", "number", "percentage")
            .distinct()
            .order_by("date"))
def get_last_seen_manifest_ids(self, bill_date):
    """Return assembly_ids of each provider's most recent, still-unprocessed manifest.

    For the given billing period, a window function ranks every provider's
    manifests by creation time (newest first). Only the top-ranked manifest
    per provider is considered: if all of its files have been processed it
    is safe to delete and is skipped; otherwise its assembly_id is returned.

    (The previous docstring claimed a tuple of (assembly_id, processed
    boolean) was returned — the function has always returned a list of
    assembly_ids.)
    """
    assembly_ids = []
    # Rank the manifests for all providers; rank 1 is the most recent.
    manifests = (
        CostUsageReportManifest.objects.filter(billing_period_start_datetime=bill_date)
        .annotate(
            row_number=Window(
                expression=RowNumber(),
                partition_by=F("provider_id"),
                order_by=F("manifest_creation_datetime").desc(),
            )
        )
        .order_by("row_number")
    )
    for manifest in manifests:
        if manifest.row_number != 1:
            continue  # only each provider's most recent manifest matters
        processed = manifest.num_total_files == manifest.num_processed_files
        # Fully-processed manifests are safe to delete, so they are not
        # reported as "last seen".
        if not processed:
            assembly_ids.append(manifest.assembly_id)
    return assembly_ids
def challenge_leaderboard(request, unique_id):
    """Render the leaderboard for one challenge.

    Users are ranked by the summed ``image_count`` of their published
    sequences that cross the challenge area; results are paginated 10 per
    page with a sliding window of at most 7 page links.

    Changes: removed debug ``print()`` calls and the redundant
    ``page is None`` re-check.
    """
    challenge = get_object_or_404(Challenge, unique_id=unique_id)
    # Unpublished challenges are visible only to their owner.
    if (not request.user.is_authenticated or request.user != challenge.user) and not challenge.is_published:
        messages.success(request, "You can't access this challenge.")
        return redirect('challenge.challenge_list')

    sequences = Sequence.objects.filter(
        is_published=True,
        geometry_coordinates__crosses=challenge.multipolygon)
    user_json = sequences.values('user').annotate(
        image_count=Sum('image_count')).order_by('-image_count').annotate(
            rank=Window(expression=RowNumber()))

    paginator = Paginator(user_json, 10)
    # Missing/empty 'page' falls back to the first page.
    page = request.GET.get('page') or 1
    try:
        pItems = paginator.page(page)
    except PageNotAnInteger:
        pItems = paginator.page(1)
    except EmptyPage:
        pItems = paginator.page(paginator.num_pages)

    # Sliding window of page links, centred on the current page when the
    # total page count exceeds 7.
    first_num = 1
    last_num = paginator.num_pages
    if paginator.num_pages > 7:
        if pItems.number < 4:
            first_num = 1
            last_num = 7
        elif pItems.number > paginator.num_pages - 3:
            first_num = paginator.num_pages - 6
            last_num = paginator.num_pages
        else:
            first_num = pItems.number - 3
            last_num = pItems.number + 3
    pItems.paginator.pages = range(first_num, last_num + 1)
    pItems.count = len(pItems)

    form = ChallengeSearchForm(request.GET)
    # Attach the username to each leaderboard row for the template.
    for i in range(len(pItems)):
        user = CustomUser.objects.get(pk=pItems[i]['user'])
        if not user:
            continue
        pItems[i]['username'] = user.username
    return render(
        request, 'challenge/leaderboard.html', {
            'items': pItems,
            'challenge': challenge,
            'form': form,
            'pageName': 'Challenge Leaderboard',
            'pageTitle': challenge.name + ' - Challenge Leaderboard'
        })
def get_tokens_usage_grouped(self) -> Optional[List[Dict[str, Any]]]:
    """
    :return: List of Dict 'gas_token', 'total', 'number', 'percentage'
    """
    # Tx count per (gas_token, day) and total tx count per day; the ratio
    # of the two gives each token's daily share as a percentage.
    token_day_window = Window(expression=Count('*'),
                              partition_by=[F('gas_token'), F('date')])
    day_window = Window(expression=Count('*'), partition_by=[F('date')])
    queryset = SafeMultisigTx.objects.annotate(date=TruncDate('created'))
    queryset = queryset.annotate(
        number=token_day_window,
        percentage=100.0 * token_day_window / day_window)
    queryset = queryset.values('date', 'gas_token', 'number', 'percentage')
    return queryset.distinct().order_by('date')
def ranked_by_average_score(teacher_id):
    """Students of the given teacher, ranked by average lesson score (descending,
    students without scores last)."""
    by_avg_desc = F('average_score').desc(nulls_last=True)
    students = Student.objects.filter(teacher_id=teacher_id)
    students = students.annotate(
        average_score=Avg('lessonstudent__score'),
        class_rank=Window(expression=Rank(), order_by=by_avg_desc),
    )
    return students.order_by(by_avg_desc)
def get(self, request):
    """Rank movies by number of comments posted between ``start`` and ``end``.

    Expects ``start`` and ``end`` (DD-MM-YYYY) in the request payload.
    Responds 400 when either is missing or malformed.

    Fixes: the original wrapped the raw values in ``str()`` before the
    presence check, so a missing value became the truthy string ``'None'``
    and the "Missing params" branch could never trigger; a bare ``except:``
    also swallowed every exception, not just parse errors.
    """
    start = request.data.get('start')
    end = request.data.get('end')
    if not start or not end:
        return Response({'msg': 'Missing params'}, status=HTTP_400_BAD_REQUEST)
    try:
        start = datetime.datetime.strptime(str(start), '%d-%m-%Y')
        end = datetime.datetime.strptime(str(end), '%d-%m-%Y')
    except ValueError:
        return Response(
            {'msg': 'Invalid date format, expected DD-MM-YYYY'},
            status=HTTP_400_BAD_REQUEST)

    # Comments in range count as 1 each; Coalesce maps "no comments" to 0.
    movies_query = Movie.objects.annotate(
        comments=Coalesce(
            Sum(
                Case(When(comment__timestamp__range=[start, end], then=1),
                     output_field=IntegerField())), 0),
        rank=Window(expression=DenseRank(),
                    order_by=F('comments').desc())).order_by(
                        '-comments', 'id')
    movies = [{
        'id': movie.id,
        'comments': movie.comments,
        'rank': movie.rank
    } for movie in movies_query]
    return Response(movies)
def get(self, order_id, user=None):
    """Fetch an order with its numbered lines, if the caller may view it.

    Access is granted to the order's customer, supplier, shipper, or any
    user holding the ``market.view_order`` permission; otherwise None.
    Side effect: when the supplier views a CONFIRMED order, it is stamped
    accepted and saved.
    """
    if user is None:
        user = self.user
    profile = self.profile
    order = self.objects.get(pk=order_id)
    if not (order.customer.profile == profile
            or self.user.has_perm('market.view_order')
            or order.supplier.profile == profile
            or (order.shipper and order.shipper.profile == profile)):
        return None
    # lines = OrderLineRepo(user=self.user).get_by_order(order_id=order_id)
    # Number the order lines newest-first (descending id).
    lines = OrderLineRepo(user=self.user).get_by_order(
        order_id=order_id).annotate(row_number=Window(
            expression=RowNumber(),
            #partition_by=[F('client')],
            order_by=F('id').desc())).order_by('row_number')
    order.lines = lines
    # NOTE(review): this compares order.customer.id to profile.id while the
    # permission check above used order.customer.profile — confirm which
    # relation is intended. The assignment repeats the one just above.
    if order.customer.id == profile.id:
        order.lines = lines
    if order.supplier.profile.id == profile.id:
        # Supplier opening a confirmed order implicitly accepts it.
        if order.status == OrderStatusEnum.CONFIRMED:
            order.accept_date = datetime.datetime.now()
            order.status = OrderStatusEnum.ACCEPTED
            order.save()
    # NOTE(review): order cannot be None here (objects.get would have
    # raised), and the customer reassignment below is a no-op — confirm
    # whether some other behavior was intended.
    if order is not None:
        order.lines = lines
    if not order.customer == profile:
        order.customer = order.customer
    return order
def get(self, request, format=None):
    """Return every store, ordered and ranked by time spent (descending)."""
    rank_window = Window(expression=Rank(),
                         order_by=F("st_time_spent").desc())
    ranked_stores = Store.objects.all() \
        .order_by("-st_time_spent") \
        .annotate(rank=rank_window)
    serialized = StoreSerializer(ranked_stores, many=True)
    return Response(serialized.data)
class TopViewSet(viewsets.ModelViewSet):
    """Read-only endpoint listing the 10 movies with the most comments,
    each annotated with its row-number rank."""
    serializer_class = TopSerializer
    # Rank by comment count (RowNumber breaks ties arbitrarily) and keep
    # only the top 10.
    queryset = Movie.objects.annotate(
        total_comments=Count('comments'),
        rank=Window(expression=RowNumber(),
                    order_by=F('total_comments').desc())).order_by(
                        '-total_comments')[:10]
    # Ranking endpoint is read-only.
    http_method_names = ['get']
def top(self):
    """Movies grouped and ordered by comment count, with a dense rank."""
    rank_window = Window(
        expression=DenseRank(),
        order_by=F('total_comments').desc(),
    )
    grouped = super().get_queryset().values('movie', )
    grouped = grouped.annotate(total_comments=Count('id'), rank=rank_window)
    return grouped.order_by('-total_comments')
def qs_with_collection(queryset: QuerySet, **_kwargs) -> QuerySet:
    """Annotate each product with its position inside its collection.

    Orders by the explicit collection sort order (unsorted entries last),
    breaking ties by the through-row id.
    """
    ordering = (
        F("collectionproduct__sort_order").asc(nulls_last=True),
        F("collectionproduct__id"),
    )
    position_window = Window(expression=DenseRank(), order_by=ordering)
    return queryset.annotate(sort_order=position_window)
def get_map_feature_values():
    """Choices tuple of distinct map-feature values, most frequent first,
    prefixed with an 'All Values' entry."""
    value_counts = MapFeature.objects.all().values('value').annotate(
        image_count=Count('value')).order_by('-image_count').annotate(
            rank=Window(expression=RowNumber()))
    # Each choice pair uses the raw value for both key and label.
    choices = [['all_values', 'All Values']]
    choices.extend([row['value'], row['value']] for row in value_counts)
    return tuple(choices)
def leaderboard_queryset() -> QuerySet:
    """Leaderboard rows ordered by elo, annotated with profile name, rank,
    and win rate as a percentage (0 when no games played)."""
    rank_window = Window(expression=Rank(), order_by=F('elo').desc())
    # Decimal('1.0') forces decimal division; games == 0 avoids div-by-zero.
    win_rate_expr = Case(
        When(games=0, then=0),
        default=(Decimal('1.0') * F("wins") / F("games")) * 100,
        output_field=DecimalField(),
    )
    board = Leaderboard.objects.select_related('profile').order_by('-elo')
    board = board.annotate(name=F("profile__name"))
    board = board.annotate(rank=rank_window)
    board = board.annotate(win_rate=win_rate_expr)
    return board
def get_queryset(self):
    """Movies — optionally restricted to a year range — annotated with
    comment count and rank."""
    params = self.request.query_params
    from_date = params.get('from_date')
    to_date = params.get('to_date')
    movies = super(TopViewSet, self).get_queryset()
    # Only filter when both endpoints are provided.
    if from_date and to_date:
        movies = movies.filter(year__range=[from_date, to_date])
    rank_window = Window(expression=Rank(),
                         order_by=F('comments_count').desc())
    return movies.annotate(comments_count=Count('comments'),
                           rank=rank_window, )
def list(self, request):
    """Top 15 pageview transitions inside 30-minute sessions for the
    caller's team.

    Layered raw SQL over the ORM-compiled queryset:
    1. flag a new session at gaps >= 30 minutes (or first event);
    2. running-sum the flags into a session id;
    3. number the first 4 pageviews of each session;
    4. LAG() pairs consecutive numbered pageviews and counts each
       source→target pair, ignoring self-transitions.
    """
    team = request.user.team_set.get()
    resp = []
    date_query = request_to_date_query(request.GET)
    # Annotate each $pageview with the previous timestamp per distinct_id.
    sessions = Event.objects.filter(
        team=team,
        event='$pageview',
        **date_query
    )\
        .annotate(previous_timestamp=Window(
            expression=Lag('timestamp', default=None),
            partition_by=F('distinct_id'),
            order_by=F('timestamp').asc()
        ))
    sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
    cursor = connection.cursor()
    cursor.execute(
        '\
        SELECT source_event, target_event, count(*) from (\
        SELECT event_number || \'_\' || current_url as target_event,LAG(event_number || \'_\' || current_url, 1) OVER (\
            PARTITION BY session\
            ) AS source_event from \
        (\
            SELECT properties->> \'$current_url\' as current_url, sessionified.session\
            ,ROW_NUMBER() OVER (\
                PARTITION BY distinct_id\
                ,session ORDER BY timestamp\
            ) AS event_number\
            FROM (\
                SELECT events_notated.*, SUM(new_session) OVER (\
                    ORDER BY distinct_id\
                    ,timestamp\
                ) AS session\
                FROM (\
                    SELECT *, CASE WHEN EXTRACT(\'EPOCH\' FROM (timestamp - previous_timestamp)) >= (60 * 30) OR previous_timestamp IS NULL THEN 1 ELSE 0 END AS new_session\
                    FROM ({}) AS inner_sessions \
                ) as events_notated \
            ) as sessionified\
        ) as final\
        where event_number <= 4\
        ) as counts\
        where source_event is not null and target_event is not null and SUBSTRING(source_event, 3) != SUBSTRING(target_event, 3)\
        group by source_event, target_event order by count desc limit 15\
        '.format(sessions_sql), sessions_sql_params)
    rows = cursor.fetchall()
    for row in rows:
        resp.append({'source': row[0], 'target': row[1], 'value': row[2]})
    resp = sorted(resp, key=lambda x: x['value'], reverse=True)
    return resp and Response(resp) or Response(resp)
def get_queryset(self):
    """Players ranked by points, optionally filtered to one country.

    The dense rank is computed over the whole player table, so filtering
    by country afterwards preserves each player's global rank.

    Fixes: ``QuerySet.filter()`` returns a new queryset — the original
    discarded the result, so the country filter never took effect.
    """
    queryset = Player.objects.annotate(rank=Window(
        expression=DenseRank(),
        order_by=F('points').desc(),
    ))
    country_iso_code = self.kwargs['country_iso_code']
    if country_iso_code:
        queryset = queryset.filter(country_iso_code=country_iso_code.upper())
    return queryset
def append_context(self, queryset):
    """Appending collection data to context.

    Collects every version string per collection for the serializer
    context, then returns one row per collection in this repository
    version, annotated with the timestamps at which a version was most
    recently added to / removed from the repository.
    """
    repo_version = self._repository_version
    collections_qs = queryset.annotate(available_versions=ArrayAgg("versions__version"))
    versions_context = {}
    for collection_id, available_versions in collections_qs.values_list(
        "pk", "available_versions"
    ):
        versions_context[collection_id] = available_versions

    self.available_versions_context = versions_context  # needed by get__serializer_context
    # NOTE(review): bare attribute access — a no-op unless `_deprecation`
    # is a property with side effects; confirm this line is intentional.
    self._deprecation

    # FirstValue over a DESC ordering picks the most recent added/removed
    # membership timestamp per collection.
    collections = Collection.objects.filter(
        pk__in=versions_context.keys(),
        versions__version_memberships__repository=repo_version.repository,
    ).annotate(
        repo_version_added_at=Window(
            expression=FirstValue(
                "versions__version_memberships__version_added__pulp_last_updated"
            ),
            partition_by=[F("versions__collection_id")],
            order_by=F("versions__version_memberships__version_added__pulp_last_updated").desc(
                nulls_last=True
            ),
        ),
        repo_version_removed_at=Window(
            expression=FirstValue(
                "versions__version_memberships__version_removed__pulp_last_updated"
            ),
            partition_by=[F("versions__collection_id")],
            order_by=F(
                "versions__version_memberships__version_removed__pulp_last_updated"
            ).desc(nulls_last=True),
        ),
    )
    # One row per collection; only the fields the serializer needs.
    return collections.distinct("versions__collection_id").only(
        "pulp_created", "name", "namespace"
    )
def get_queryset(self):
    """SC2 profiles annotated with match stats and ranked by adjusted win rate.

    Optional ``season`` / ``league`` query params narrow which ranked
    matches count. Only profiles with at least one counted match appear.

    Fixes: removed the leftover debug ``print`` of the ``limit`` param.
    """
    rank_window = Window(expression=Rank(), order_by=F('adjusted_win_rate').desc())
    filter = Q(rosters__match__ranked=True)
    season = self.request.GET.get('season', '')
    league = self.request.GET.get('league', '')
    if league != '':
        filter &= Q(rosters__match__league__id=league)
    if season != '':
        filter &= Q(rosters__match__season__id=season)
    profiles = (
        models.SC2Profile
        .objects
        .annotate(
            total_matches=Count(
                'rosters__match',
                filter=filter,
                distinct=True
            ),
            total_wins=Count(
                'rosters__match__match_winners',
                filter=filter & Q(rosters__match__match_winners__profile__id=F('id')),
                distinct=True
            ),
            total_losses=Count(
                'rosters__match__match_losers',
                filter=filter & Q(rosters__match__match_losers__profile__id=F('id')),
                distinct=True
            ),
            total_draws=F('total_matches') - F('total_wins') - F('total_losses'),
            # Draws count as half a game in the denominator; Decimal('1.0')
            # forces decimal division and total_matches == 0 avoids /0.
            win_rate=Case(
                When(total_matches=0, then=0),
                default=(Decimal('1.0') * F("total_wins") /
                         (F("total_wins") + F("total_losses") + (0.5 * F("total_draws")))) * 100,
                output_field=DecimalField(),
            ),
            rate=Case(*self.build_adj_rates(),
                      default=1,
                      output_field=DecimalField(decimal_places=3, max_digits=5)
                      ),
            adjusted_win_rate=F('rate') * F('win_rate'),
            rank=rank_window
        )
        .filter(total_matches__gte=1)
        .order_by('rank')
    )
    return profiles
def fill_record_index(apps, _):
    """Data migration: renumber ObjectRecord.index per object, ordered by
    (start_at, id), saving only records whose index actually changed."""
    ObjectRecord = apps.get_model("core", "ObjectRecord")
    numbering = Window(
        expression=RowNumber(),
        partition_by=[F("object")],
        order_by=[F("start_at"), F("id")],
    )
    annotated = ObjectRecord.objects.annotate(row_number=numbering).order_by("object")
    for record in annotated:
        if record.index == record.row_number:
            continue  # already correct — skip the write
        record.index = record.row_number
        record.save()
def list(self, request, *args, **kwargs):
    """Rank movies by comments created in [date_from, date_to).

    Expects ``date_from`` and ``date_to`` query params in ISO
    ``YYYY-MM-DD`` form; responds 400 when either is missing or malformed.

    Fixes: malformed dates previously raised an unhandled ValueError
    (an HTTP 500); they now produce a 400 like missing dates.
    """
    date_from = request.query_params.get('date_from', None)
    date_to = request.query_params.get('date_to', None)
    if not date_from or not date_to:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    try:
        date_from = datetime.strptime(date_from, '%Y-%m-%d')
        date_to = datetime.strptime(date_to, '%Y-%m-%d')
    except ValueError:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    queryset = self.get_queryset()
    # Count only comments created in the half-open range, then rank densely.
    queryset = queryset.annotate(
        total_comments=Count(
            'comments',
            filter=(Q(comments__created_date__gte=date_from)
                    & Q(comments__created_date__lt=date_to)))) \
        .order_by('-total_comments') \
        .annotate(rank=Window(expression=DenseRank(),
                              order_by=F('total_comments').desc()))
    serializer = TopMoviesSerializer(queryset, many=True)
    return Response(serializer.data)
def get_queryset(self):
    """Movies ranked by comment count, optionally restricted to a
    publication-date range via ``date_start`` / ``date_end`` query params.

    Fixes: the date-range branch originally sat below an unconditional
    ``return movies_sort`` and was unreachable dead code; it now runs
    when query params are present.
    """
    movies = Movie.objects.all()
    dense_rank = Window(expression=DenseRank(),
                        order_by=F('total_comments').desc())
    movies_sort = movies.annotate(total_comments=Count(
        'Comments')).order_by('-total_comments').annotate(rank=dense_rank)
    if self.request.query_params:
        date_start = self.request.query_params.get('date_start')
        date_end = self.request.query_params.get('date_end')
        sorted_date_movies = movies_sort.filter(
            Comments__pub_date__gte=date_start,
            Comments__pub_date__lte=date_end)
        return sorted_date_movies
    return movies_sort
def get(self, request: Request):
    """Return standings: SC2 profiles with match stats, ranked by adjusted
    win rate.

    Optional ``season`` / ``league`` query params narrow which ranked
    matches are counted; only profiles with at least one counted match
    appear.
    """
    season = request.GET.get('season')
    league = request.GET.get('league')
    rank_window = Window(expression=Rank(), order_by=F('adjusted_win_rate').desc())
    filter = Q(rosters__match__ranked=True)
    if league is not None:
        filter &= Q(rosters__match__league=league)
    if season is not None:
        filter &= Q(rosters__match__season=season)
    profiles = (SC2Profile.objects.annotate(
        total_matches=Count('rosters__match', filter=filter, distinct=True),
        total_wins=Count(
            'rosters__match__match_winners',
            filter=filter & Q(rosters__match__match_winners__profile__id=F('id')),
            distinct=True),
        total_losses=Count(
            'rosters__match__match_losers',
            filter=filter & Q(rosters__match__match_losers__profile__id=F('id')),
            distinct=True),
        total_draws=F('total_matches') - F('total_wins') - F('total_losses'),
        # Draws count as half a game in the denominator; Decimal('1.0')
        # forces decimal division and total_matches == 0 avoids /0.
        win_rate=Case(
            When(total_matches=0, then=0),
            default=(Decimal('1.0') * F("total_wins") /
                     (F("total_wins") + F("total_losses") +
                      (0.5 * F("total_draws")))) * 100,
            output_field=DecimalField(),
        ),
        rate=Case(*self.build_adj_rates(),
                  default=1,
                  output_field=DecimalField(decimal_places=3, max_digits=5)),
        adjusted_win_rate=F('rate') * F('win_rate'),
        rank=rank_window).filter(total_matches__gte=1).order_by('rank'))
    return Response(StandingsSerializer(profiles, many=True).data, status=200)
def gentella_html(request):
    """Render a Gentelella admin page selected by the trailing ``.html``
    path segment, optionally populating a table context.

    Fixes: removed a leftover debug ``print``, stopped shadowing the
    builtin ``list``, and dropped a large commented-out dead block.
    """
    context = {}
    # All gentelella resource paths end in .html; the last path segment
    # names the template to render.
    load_template = request.path.split('/')[-1]
    logger.debug(' function name gentella_html : %s ' % load_template)
    if load_template == 'tables_dynamic.html':
        films = Film.objects.all()
        film_count = Film.objects.count()
        logger.debug(' function name list_cnt : %s ' % film_count)
        context = {'list': films}
    elif load_template == 'tables_dynamic_customer_list.html':
        # Window functions require sqlite3 >= 3.25.
        customers = CustomerList.objects.annotate(
            row_number=Window(expression=RowNumber(),
                              partition_by=[F('sid')],
                              order_by=F('id').desc())).order_by(
                                  'row_number', 'id')
        customer_count = CustomerList.objects.count()
        logger.debug(' function name list_cnt : %s ' % customer_count)
        context = {'list': customers}
    elif load_template == 'tables_dynamic_customer_list2.html':
        # Not implemented yet; the template renders with an empty context.
        pass
    template = loader.get_template('sakila/' + load_template)
    return HttpResponse(template.render(context, request))
def get(self, request):
    """Extract dates range from query_params and return top movies."""
    start, end = self.get_start_end_date_from_request(request)
    # Each in-range comment counts as 1; Coalesce maps "no comments" to 0.
    in_range_flag = Case(
        When(comment__created__range=[start, end], then=1),
        output_field=IntegerField())
    ranked_movies = Movie.objects.annotate(
        total_comments=Coalesce(Sum(in_range_flag), 0),
        rank=Window(
            expression=DenseRank(),
            order_by=F('total_comments').desc(),
        )).order_by('-total_comments', 'id')
    # extract needed fields to response
    # it's too trivial to use Serializer here
    payload = [{
        'movie_id': movie.id,
        'total_comments': movie.total_comments,
        'rank': movie.rank
    } for movie in ranked_movies]
    return Response(payload)
def leaderboard(request, exam_id):
    """Render the exam leaderboard, paginated, opening on the page that
    contains the viewing mathlete's own entry."""
    # Authentication
    user = request.user
    exam = get_object_or_404(Exam, pk=exam_id)
    # log(user=str(user), exam=exam_id, end=exam.ended, can_view=user.can_view_exam(exam), cvl=user.can_view_leaderboard(exam))
    if not user.can_view_leaderboard(exam):
        raise PermissionDenied("You do not have permission to view the "
                               "leaderboard for this exam")
    # if exam.is_ai:
    #     return ai_leaderboard(request, exam)
    problems = exam.problem_list
    # Rank competitors by total score; RANK() lets ties share a rank.
    comps = exam.competitors.annotate(rank=Window(
        expression=Rank(),
        order_by=F('total_score').desc()),
    ).order_by('-total_score')
    items_per_page = per_page(comps.count())
    if user.is_mathlete:  # should be: if user is participating mathlete, because leaderboard can be viewed by non-participants after the contest ends
        comp = user.rel_comps(exam).first()  # replace with participating comp
        # maybe break ties by time so we can just use score__lte to get rank?
        # NOTE(review): comp may be None (first() on an empty queryset),
        # which would raise on comp.id below; and if no competitor matches,
        # default_page stays unbound — confirm mathletes always have a row.
        for i, c in enumerate(comps):
            if c.id == comp.id:
                # Jump to the page containing the viewer's own entry.
                default_page = i // items_per_page + 1
                break
    else:
        default_page = 1
    paginator = Paginator(comps, items_per_page)
    page_number = request.GET.get('page', default=default_page)
    page_obj = paginator.get_page(page_number)
    context = {
        'problems': problems,
        'exam': exam,
        'page_obj': page_obj,
        'rel_comps': user.rel_comps(exam),
        'all_pages': [paginator.page(i) for i in paginator.page_range]
    }
    return render(request, 'exam/leaderboard.html', context)
def calculate_paths(self, filter: PathFilter, team: Team):
    """Compute the top 20 event-to-event path transitions for a team.

    Builds layered raw SQL over the ORM-compiled events queryset:
    1. events_notated — flag a new session at gaps >= 30 minutes;
    2. sessionified  — running-sum the flags into a session id;
    3. final         — number the first events of each session;
    4. counts        — LAG() pairs consecutive events within a session;
    then aggregates and returns source/target/value dicts, largest first.
    """
    date_query = request_to_date_query({"date_from": filter._date_from, "date_to": filter._date_to}, exact=False)
    resp = []
    prop_type = filter.prop_type
    event, event_filter = filter.target_event
    start_comparator = filter.comparator
    # Base queryset: the team's events (a specific target event if given,
    # otherwise everything except the built-in capture events), restricted
    # by property filters, annotated with the previous timestamp per person.
    sessions = (
        Event.objects.add_person_id(team.pk)
        .filter(team=team, **(event_filter), **date_query)
        .filter(
            ~Q(event__in=["$autocapture", "$pageview", "$identify", "$pageleave", "$screen"])
            if event is None
            else Q()
        )
        .filter(
            properties_to_Q(filter.properties, team_id=team.pk, filter_test_accounts=filter.filter_test_accounts)
            if filter and (filter.properties or filter.filter_test_accounts)
            else Q()
        )
        .annotate(
            previous_timestamp=Window(
                expression=Lag("timestamp", default=None),
                partition_by=F("person_id"),
                order_by=F("timestamp").asc(),
            )
        )
    )
    sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
    # Autocapture paths need element data joined into the compiled SQL.
    if event == "$autocapture":
        sessions_sql = self._add_elements(query_string=sessions_sql)
    events_notated = "\
    SELECT *, CASE WHEN EXTRACT('EPOCH' FROM (timestamp - previous_timestamp)) >= (60 * 30) OR previous_timestamp IS NULL THEN 1 ELSE 0 END AS new_session\
    FROM ({}) AS inner_sessions\
    ".format(
        sessions_sql
    )
    sessionified = "\
    SELECT events_notated.*, SUM(new_session) OVER (\
        ORDER BY person_id\
                ,timestamp\
        ) AS session\
    FROM ({}) as events_notated\
    ".format(
        events_notated
    )
    # Optionally restrict sessions to those beginning at a start point.
    if filter and filter.start_point:
        sessionified = self._apply_start_point(
            start_comparator=start_comparator,
            query_string=sessionified,
            start_point=filter.start_point,
        )
    final = "\
    SELECT {} as path_type, id, sessionified.session\
        ,ROW_NUMBER() OVER (\
                PARTITION BY person_id\
                ,session ORDER BY timestamp\
                ) AS event_number\
    FROM ({}) as sessionified\
    ".format(
        prop_type, sessionified
    )
    # Only the first 4 events of each session contribute transitions.
    counts = "\
    SELECT event_number || '_' || path_type as target_event, id as target_id, LAG(event_number || '_' || path_type, 1) OVER (\
            PARTITION BY session\
            ) AS source_event , LAG(id, 1) OVER (\
            PARTITION BY session\
            ) AS source_id from \
    ({}) as final\
    where event_number <= 4\
    ".format(
        final
    )
    query = "\
    SELECT source_event, target_event, MAX(target_id), MAX(source_id), count(*) from ({}) as counts\
    where source_event is not null and target_event is not null\
    group by source_event, target_event order by count desc limit 20\
    ".format(
        counts
    )
    cursor = connection.cursor()
    cursor.execute(query, sessions_sql_params)
    rows = cursor.fetchall()
    for row in rows:
        resp.append(
            {"source": row[0], "target": row[1], "target_id": row[2], "source_id": row[3], "value": row[4],}
        )
    resp = sorted(resp, key=lambda x: x["value"], reverse=True)
    return resp