def maybe_alert(cls, project_id, message, group_id=None):
    """Create an Alert for the project (and optionally a group) unless a
    recent one already exists; return the new alert, or None when throttled.

    NOTE(review): the original comment claimed a 30-minute window for the
    project-level check, but the code uses 60 minutes for both checks --
    confirm which was intended.
    TODO: there is a race condition if we're calling this function
    concurrently for the same project.
    """
    now = timezone.now()
    manager = cls.objects
    window_start = now - timedelta(minutes=60)

    # Throttle: any recent alert for the project as a whole.
    if manager.filter(project=project_id, datetime__gte=window_start).exists():
        return
    # Throttle: any recent alert for this specific group.
    if manager.filter(project=project_id, group=group_id,
                      datetime__gte=window_start).exists():
        return

    alert = manager.create(
        project_id=project_id,
        group_id=group_id,
        datetime=now,
        message=message,
    )

    if not group_id and has_trending():
        # Capture the top 5 trending events at the time of this error
        trending = Group.objects.get_accelerated(
            [project_id], minutes=MINUTE_NORMALIZATION)[:5]
        for trending_group in trending:
            AlertRelatedGroup.objects.create(
                group=trending_group,
                alert=alert,
            )

    return alert
def dashboard(request):
    """Render the dashboard: top trending groups and newest groups across
    every project visible to the current user.

    Anonymous users with no visible projects are redirected to login.
    """
    project_list = get_project_list(request.user, key='slug')
    if not project_list and not request.user.is_authenticated():
        return HttpResponseRedirect(get_login_url())

    top_event_list = None
    new_event_list = None
    if project_list:
        cutoff = datetime.datetime.now() - datetime.timedelta(days=1)
        base_qs = Group.objects.filter(
            project__in=project_list.values(),
            status=0,
        ).select_related('project').order_by('-score')

        if has_trending():
            # Trend support available: rank groups over the last 24h.
            top_event_list = list(Group.objects.get_accelerated(
                base_qs, minutes=60 * 24)[:DASHBOARD_EVENTS])
        else:
            # Fallback: highest-scored groups seen within the last day.
            top_event_list = list(
                base_qs.filter(last_seen__gte=cutoff)[:DASHBOARD_EVENTS])

        new_event_list = list(
            base_qs.filter(active_at__gte=cutoff)[:DASHBOARD_EVENTS])

    return render_to_response('sentry/dashboard.html', {
        'top_event_list': top_event_list,
        'new_event_list': new_event_list,
    }, request)
def get_group_trends(request, project=None):
    """Return up to ``limit`` trending groups rendered as JSON.

    Query params: ``minutes`` (trend window, default 15) and ``limit``
    (capped at 100, default 10).
    """
    minutes = int(request.REQUEST.get("minutes", 15))
    limit = min(100, int(request.REQUEST.get("limit", 10)))

    if project:
        project_dict = {project.pk: project}
    else:
        project_dict = get_project_list(request.user)

    base_qs = Group.objects.filter(
        project__in=project_dict.keys(), status=0).order_by("-score")

    if has_trending():
        group_list = list(
            Group.objects.get_accelerated(base_qs, minutes=minutes)[:limit])
    else:
        # No trend support: most recently seen groups inside the window.
        cutoff_dt = timezone.now() - datetime.timedelta(minutes=minutes)
        group_list = list(base_qs.filter(last_seen__gte=cutoff_dt)[:limit])

    # Prime each group's cached project relation from the local dict.
    for group in group_list:
        group._project_cache = project_dict.get(group.project_id)

    data = transform_groups(
        request, group_list, template="sentry/partial/_group_small.html")
    response = HttpResponse(json.dumps(data))
    response["Content-Type"] = "application/json"
    return response
def test_get_accelerrated(self):
    # NOTE(review): "accelerrated" is a typo for "accelerated"; kept as-is
    # to avoid changing the test's discoverable identifier.
    """A single stored group should come back from get_accelerated."""
    if not has_trending():
        return
    created = Group.objects.from_kwargs(
        1, message='foo', checksum='a' * 32).group
    accelerated = Group.objects.get_accelerated(
        Group.objects.all(), minutes=settings.MINUTE_NORMALIZATION)
    group_list = list(accelerated[0:100])
    self.assertEquals(len(group_list), 1)
    self.assertEquals(group_list[0], created)
def get_group_trends(request, team=None, project=None):
    """Return up to ``limit`` trending unresolved groups as JSON.

    Scope: the single ``project`` when given without a team, otherwise all
    projects the user can access within ``team``.
    """
    minutes = int(request.REQUEST.get("minutes", 15))
    limit = min(100, int(request.REQUEST.get("limit", 10)))

    if not team and project:
        project_list = [project]
    else:
        project_list = Project.objects.get_for_user(request.user, team=team)
    project_dict = dict((p.id, p) for p in project_list)

    base_qs = Group.objects.filter(project__in=project_list, status=0)

    if has_trending():
        group_list = list(Group.objects.get_accelerated(
            project_dict, base_qs, minutes=minutes)[:limit])
    else:
        # No trend support: unresolved groups seen in the window, by score.
        cutoff_dt = timezone.now() - datetime.timedelta(minutes=minutes)
        group_list = list(
            base_qs.filter(status=STATUS_UNRESOLVED, last_seen__gte=cutoff_dt)
            .extra(select={"sort_value": "score"})
            .order_by("-score")[:limit])

    # Prime each group's cached project relation from the local dict.
    for group in group_list:
        group._project_cache = project_dict.get(group.project_id)

    response = HttpResponse(to_json(group_list, request))
    response["Content-Type"] = "application/json"
    return response
def get_group_trends(request, project=None):
    """Return up to ``limit`` trending groups rendered as JSON.

    Query params: ``minutes`` (trend window, default 15) and ``limit``
    (capped at 100, default 10).
    """
    minutes = int(request.REQUEST.get('minutes', 15))
    limit = min(100, int(request.REQUEST.get('limit', 10)))

    if project:
        project_dict = {project.pk: project}
    else:
        project_dict = get_project_list(request.user)

    base_qs = Group.objects.filter(
        project__in=project_dict.keys(),
        status=0,
    )

    if has_trending():
        group_list = list(
            Group.objects.get_accelerated(base_qs, minutes=minutes)[:limit])
    else:
        # No trend support: recently-seen groups ordered by score.
        cutoff_dt = timezone.now() - datetime.timedelta(minutes=minutes)
        group_list = list(
            base_qs.filter(last_seen__gte=cutoff_dt)
            .order_by('-score')[:limit])

    # Prime each group's cached project relation from the local dict.
    for group in group_list:
        group._project_cache = project_dict.get(group.project_id)

    response = HttpResponse(to_json(group_list, request))
    response['Content-Type'] = 'application/json'
    return response
def maybe_alert(cls, project_id, message, group_id=None):
    """Create an alert unless one was recently created for the same scope;
    return the created alert, or None when throttled.

    NOTE(review): the original comment said the project-level window is
    30 minutes, but both checks below use 60 -- confirm the intent.
    TODO: race condition when called concurrently for the same project.
    """
    now = timezone.now()
    threshold = now - timedelta(minutes=60)
    qs = cls.objects

    # Skip when any alert exists for the project inside the window.
    if qs.filter(project=project_id, datetime__gte=threshold).exists():
        return
    # Skip when an alert exists for this specific group inside the window.
    if qs.filter(project=project_id, group=group_id,
                 datetime__gte=threshold).exists():
        return

    alert = qs.create(
        project_id=project_id,
        group_id=group_id,
        datetime=now,
        message=message,
    )

    if not group_id and has_trending():
        # Capture the top 5 trending events at the time of this error
        for related in Group.objects.get_accelerated(
                [project_id], minutes=MINUTE_NORMALIZATION)[:5]:
            AlertRelatedGroup.objects.create(group=related, alert=alert)

    return alert
def group_list(request, team, project):
    """Render the group stream page for ``project``.

    Realtime polling is only enabled on the first page of results.
    """
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    response = _get_group_list(request=request, project=project)

    # XXX: this is duplicate in _get_group_list
    sort_label = SORT_OPTIONS[response['sort']]

    has_realtime = page == 1

    context = {
        'team': project.team,
        'project': project,
        'from_date': response['date_from'],
        'to_date': response['date_to'],
        'date_type': response['date_type'],
        'has_realtime': has_realtime,
        'event_list': response['event_list'],
        'today': response['today'],
        'sort': response['sort'],
        'sort_label': sort_label,
        'filters': response['filters'],
        'SORT_OPTIONS': SORT_OPTIONS,
        'HAS_TRENDING': has_trending(),
    }
    return render_to_response('sentry/groups/group_list.html', context, request)
def group_list(request, team, project):
    """Render the group stream page for ``project``.

    Only the first page of the stream gets realtime updates.
    """
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    data = _get_group_list(
        request=request,
        project=project,
    )

    # XXX: this is duplicate in _get_group_list
    sort_label = SORT_OPTIONS[data['sort']]

    return render_to_response(
        'sentry/groups/group_list.html', {
            'team': project.team,
            'project': project,
            'from_date': data['date_from'],
            'to_date': data['date_to'],
            'date_type': data['date_type'],
            'has_realtime': page == 1,
            'event_list': data['event_list'],
            'today': data['today'],
            'sort': data['sort'],
            'sort_label': sort_label,
            'filters': data['filters'],
            'SORT_OPTIONS': SORT_OPTIONS,
            'HAS_TRENDING': has_trending(),
        }, request)
def get_group_trends(request, project=None):
    """Return up to ``limit`` trending groups rendered as JSON.

    Query params: ``minutes`` (trend window, default 15) and ``limit``
    (capped at 100, default 10).
    """
    minutes = int(request.REQUEST.get('minutes', 15))
    limit = min(100, int(request.REQUEST.get('limit', 10)))

    if project:
        project_list = [project]
    else:
        project_list = get_project_list(request.user).values()

    cutoff = datetime.timedelta(minutes=minutes)
    cutoff_dt = datetime.datetime.now() - cutoff

    base_qs = Group.objects.filter(
        project__in=project_list,
        status=0,
    ).select_related('project').order_by('-score')

    if has_trending():
        # BUG FIX: the window was previously reconstructed from the timedelta
        # as ``cutoff.days * 1440 + cutoff.seconds * 60`` -- the seconds
        # component was multiplied instead of divided by 60, inflating the
        # window 3600x (e.g. 15 minutes became 54000). Pass the requested
        # window straight through instead.
        group_list = list(Group.objects.get_accelerated(
            base_qs, minutes=minutes)[:limit])
    else:
        group_list = list(base_qs.filter(last_seen__gte=cutoff_dt)[:limit])

    data = transform_groups(request, group_list,
                            template='sentry/partial/_group_small.html')
    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def group_list(request, team, project):
    """Render the group stream page for ``project``.

    Realtime polling is only enabled on the first page of results.
    """
    try:
        page = int(request.GET.get("p", 1))
    except (TypeError, ValueError):
        page = 1

    data = _get_group_list(request=request, project=project)

    # XXX: this is duplicate in _get_group_list
    sort_label = SORT_OPTIONS[data["sort"]]

    has_realtime = page == 1

    context = {
        "team": project.team,
        "project": project,
        "from_date": data["date_from"],
        "to_date": data["date_to"],
        "date_type": data["date_type"],
        "has_realtime": has_realtime,
        "event_list": data["event_list"],
        "today": data["today"],
        "sort": data["sort"],
        "sort_label": sort_label,
        "filters": data["filters"],
        "SORT_OPTIONS": SORT_OPTIONS,
        "HAS_TRENDING": has_trending(),
    }
    return render_to_response(
        "sentry/groups/group_list.html",
        context,
        request,
    )
def get_group_trends(request, project=None):
    """JSON endpoint listing the top trending groups for the user's projects.

    ``minutes`` selects the trend window (default 15); ``limit`` caps the
    number of results at 100 (default 10).
    """
    minutes = int(request.REQUEST.get('minutes', 15))
    limit = min(100, int(request.REQUEST.get('limit', 10)))

    if project:
        projects_by_id = {project.pk: project}
    else:
        projects_by_id = get_project_list(request.user)

    queryset = Group.objects.filter(
        project__in=projects_by_id.keys(),
        status=0,
    )

    if has_trending():
        group_list = list(
            Group.objects.get_accelerated(queryset, minutes=minutes)[:limit])
    else:
        # No trend support: recently-seen groups ordered by score.
        threshold = timezone.now() - datetime.timedelta(minutes=minutes)
        group_list = list(
            queryset.filter(last_seen__gte=threshold)
            .order_by('-score')[:limit])

    # Prime each group's cached project relation from the local dict.
    for group in group_list:
        group._project_cache = projects_by_id.get(group.project_id)

    payload = to_json(group_list, request)
    response = HttpResponse(payload)
    response['Content-Type'] = 'application/json'
    return response
def group_list(request, project, view_id=None):
    """Render the group stream, optionally narrowed to a saved view.

    Redirects to the project dashboard when ``view_id`` does not exist.
    """
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    view = None
    if view_id:
        try:
            view = View.objects.get_from_cache(pk=view_id)
        except View.DoesNotExist:
            return HttpResponseRedirect(reverse('sentry', args=[project.pk]))

    filters, event_list = _get_group_list(
        request=request,
        project=project,
        view=view,
    )

    # XXX: this is duplicate in _get_group_list
    sort = request.GET.get('sort')
    if sort not in SORT_OPTIONS:
        sort = settings.DEFAULT_SORT_OPTION
    sort_label = SORT_OPTIONS[sort]

    since = request.GET.get('since')
    if since not in DATE_OPTIONS:
        since = settings.DEFAULT_DATE_OPTION
    since_label = DATE_OPTIONS[since]

    today = datetime.datetime.utcnow()
    has_realtime = page == 1

    return render_to_response('sentry/groups/group_list.html', {
        'project': project,
        'has_realtime': has_realtime,
        'event_list': event_list,
        'today': today,
        'sort': sort,
        'sort_label': sort_label,
        'since': since,
        'since_label': since_label,
        'filters': filters,
        'view': view,
        'SORT_OPTIONS': SORT_OPTIONS,
        'DATE_OPTIONS': DATE_OPTIONS,
        'HAS_TRENDING': has_trending(),
        'PAGE': 'dashboard',
    }, request)
def group_list(request, project, view_id=None):
    """Render the group stream, optionally narrowed to a saved view.

    Unknown ``view_id`` values redirect back to the project dashboard.
    """
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    if view_id:
        try:
            view = View.objects.get_from_cache(pk=view_id)
        except View.DoesNotExist:
            return HttpResponseRedirect(reverse('sentry', args=[project.pk]))
    else:
        view = None

    filters, event_list = _get_group_list(
        request=request,
        project=project,
        view=view,
    )

    # XXX: this is duplicate in _get_group_list
    sort = request.GET.get('sort')
    if sort not in SORT_OPTIONS:
        sort = settings.DEFAULT_SORT_OPTION

    since = request.GET.get('since')
    if since not in DATE_OPTIONS:
        since = settings.DEFAULT_DATE_OPTION

    context = {
        'project': project,
        'has_realtime': page == 1,
        'event_list': event_list,
        'today': datetime.datetime.utcnow(),
        'sort': sort,
        'sort_label': SORT_OPTIONS[sort],
        'since': since,
        'since_label': DATE_OPTIONS[since],
        'filters': filters,
        'view': view,
        'SORT_OPTIONS': SORT_OPTIONS,
        'DATE_OPTIONS': DATE_OPTIONS,
        'HAS_TRENDING': has_trending(),
        'PAGE': 'dashboard',
    }
    return render_to_response(
        'sentry/groups/group_list.html', context, request)
def group_list(request, project):
    """Render the group stream for ``project``; the ``view`` query param
    selects an optional saved view (unknown ids redirect to the dashboard).
    """
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    view = None
    view_id = request.GET.get('view')
    if view_id:
        try:
            view = View.objects.get_from_cache(pk=int(view_id))
        except View.DoesNotExist:
            return HttpResponseRedirect(reverse('sentry', args=[project.slug]))

    data = _get_group_list(
        request=request,
        project=project,
        view=view,
    )

    # XXX: this is duplicate in _get_group_list
    sort_label = SORT_OPTIONS[data['sort']]

    # Realtime updates only make sense on the first page of the stream.
    has_realtime = page == 1

    context = {
        'project': project,
        'from_date': data['date_from'],
        'to_date': data['date_to'],
        'has_realtime': has_realtime,
        'event_list': data['event_list'],
        'today': data['today'],
        'sort': data['sort'],
        'sort_label': sort_label,
        'filters': data['filters'],
        'view': view,
        'SORT_OPTIONS': SORT_OPTIONS,
        'HAS_TRENDING': has_trending(),
        'PAGE': 'dashboard',
    }
    return render_to_response(
        'sentry/groups/group_list.html', context, request)
def group_list(request, project):
    """Render the group stream for ``project``; ``?view=<id>`` narrows the
    stream to a saved view, redirecting home when the id is unknown.
    """
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    view_id = request.GET.get('view')
    if view_id:
        try:
            view = View.objects.get_from_cache(pk=int(view_id))
        except View.DoesNotExist:
            return HttpResponseRedirect(reverse('sentry', args=[project.slug]))
    else:
        view = None

    response = _get_group_list(
        request=request,
        project=project,
        view=view,
    )

    # XXX: this is duplicate in _get_group_list
    sort_label = SORT_OPTIONS[response['sort']]

    return render_to_response('sentry/groups/group_list.html', {
        'project': project,
        'from_date': response['date_from'],
        'to_date': response['date_to'],
        'has_realtime': page == 1,
        'event_list': response['event_list'],
        'today': response['today'],
        'sort': response['sort'],
        'sort_label': sort_label,
        'filters': response['filters'],
        'view': view,
        'SORT_OPTIONS': SORT_OPTIONS,
        'HAS_TRENDING': has_trending(),
        'PAGE': 'dashboard',
    }, request)
def get_group_trends(request, team=None, project=None):
    """Return up to ``limit`` trending unresolved groups as JSON.

    Scope: the single ``project`` when given without a team, otherwise all
    projects the user can access within ``team``.
    """
    minutes = int(request.REQUEST.get('minutes', 15))
    limit = min(100, int(request.REQUEST.get('limit', 10)))

    if not team and project:
        project_list = [project]
    else:
        project_list = Project.objects.get_for_user(request.user, team=team)
    project_dict = dict((p.id, p) for p in project_list)

    base_qs = Group.objects.filter(
        project__in=project_list,
        status=0,
    )

    if has_trending():
        group_list = list(Group.objects.get_accelerated(
            project_dict, base_qs, minutes=minutes)[:limit])
    else:
        # No trend support: unresolved groups seen in the window, by score.
        cutoff_dt = timezone.now() - datetime.timedelta(minutes=minutes)
        group_list = list(
            base_qs.filter(status=STATUS_UNRESOLVED, last_seen__gte=cutoff_dt)
            .extra(select={'sort_value': 'score'})
            .order_by('-score')[:limit])

    # Prime each group's cached project relation from the local dict.
    for group in group_list:
        group._project_cache = project_dict.get(group.project_id)

    response = HttpResponse(to_json(group_list, request))
    response['Content-Type'] = 'application/json'
    return response
def group_list(request, project):
    """Render the group stream for ``project``; ``?view=<id>`` selects a
    saved view, redirecting home when the id is unknown.
    """
    try:
        page = int(request.GET.get("p", 1))
    except (TypeError, ValueError):
        page = 1

    view = None
    view_id = request.GET.get("view")
    if view_id:
        try:
            view = View.objects.get_from_cache(pk=int(view_id))
        except View.DoesNotExist:
            return HttpResponseRedirect(reverse("sentry", args=[project.slug]))

    data = _get_group_list(request=request, project=project, view=view)

    # XXX: this is duplicate in _get_group_list
    sort_label = SORT_OPTIONS[data["sort"]]

    context = {
        "project": project,
        "from_date": data["date_from"],
        "to_date": data["date_to"],
        "has_realtime": page == 1,
        "event_list": data["event_list"],
        "today": data["today"],
        "sort": data["sort"],
        "sort_label": sort_label,
        "filters": data["filters"],
        "view": view,
        "SORT_OPTIONS": SORT_OPTIONS,
        "HAS_TRENDING": has_trending(),
        "PAGE": "dashboard",
    }
    return render_to_response(
        "sentry/groups/group_list.html",
        context,
        request,
    )
def _get_group_list(request, project):
    """Build the (filtered, sorted) group queryset for the stream view.

    Reads filter/date/sort parameters from ``request.GET``, persists the
    chosen sort in the session, and returns a dict with the queryset plus
    the resolved filters/date-range/sort metadata for the template.
    """
    filters = []
    for cls in get_filters(Group, project):
        try:
            filters.append(cls(request, project))
        except Exception as e:
            logger = logging.getLogger('sentry.filters')
            logger.exception('Error initializing filter %r: %s', cls, e)

    event_list = Group.objects
    if request.user.is_authenticated() and request.GET.get('bookmarks'):
        # Bookmark mode: only groups this user bookmarked in this project.
        event_list = event_list.filter(
            bookmark_set__project=project,
            bookmark_set__user=request.user,
        )
    else:
        event_list = event_list.filter(project=project)

    for filter_ in filters:
        try:
            if not filter_.is_set():
                continue
            event_list = filter_.get_query_set(event_list)
        except Exception as e:
            logger = logging.getLogger('sentry.filters')
            # BUG FIX: this previously logged ``cls`` (the leftover variable
            # from the construction loop above) instead of the filter that
            # actually failed here.
            logger.exception('Error processing filter %r: %s', filter_, e)

    date_from = request.GET.get('df')
    time_from = request.GET.get('tf')
    date_to = request.GET.get('dt')
    time_to = request.GET.get('tt')
    date_type = request.GET.get('date_type')

    today = timezone.now()
    # date format is Y-m-d
    if any(x is not None for x in [date_from, time_from, date_to, time_to]):
        date_from = parse_date(date_from, time_from)
        date_to = parse_date(date_to, time_to)
    else:
        # Default window: the trailing five days, open-ended.
        date_from = today - datetime.timedelta(days=5)
        date_to = None

    if date_type == 'first_seen':
        if date_from:
            event_list = event_list.filter(first_seen__gte=date_from)
        elif date_to:
            event_list = event_list.filter(first_seen__lte=date_to)
    else:
        if date_from and date_to:
            event_list = event_list.filter(
                groupcountbyminute__date__gte=date_from,
                groupcountbyminute__date__lte=date_to,
            )
        elif date_from:
            event_list = event_list.filter(last_seen__gte=date_from)
        elif date_to:
            event_list = event_list.filter(last_seen__lte=date_to)

    sort = request.GET.get('sort') or request.session.get('streamsort')
    if sort not in SORT_OPTIONS:
        sort = DEFAULT_SORT_OPTION

    # Save last sort in session
    if sort != request.session.get('streamsort'):
        request.session['streamsort'] = sort

    # Accelerated sorts need database trend support.
    if sort.startswith('accel_') and not has_trending():
        sort = DEFAULT_SORT_OPTION

    engine = get_db_engine('default')
    if engine.startswith('sqlite'):
        score_clause = SQLITE_SORT_CLAUSES.get(sort)
        filter_clause = SQLITE_SCORE_CLAUSES.get(sort)
    elif engine.startswith('mysql'):
        score_clause = MYSQL_SORT_CLAUSES.get(sort)
        filter_clause = MYSQL_SCORE_CLAUSES.get(sort)
    elif engine.startswith('oracle'):
        score_clause = ORACLE_SORT_CLAUSES.get(sort)
        filter_clause = ORACLE_SCORE_CLAUSES.get(sort)
    elif engine in ('django_pytds', 'sqlserver_ado', 'sql_server.pyodbc'):
        score_clause = MSSQL_SORT_CLAUSES.get(sort)
        filter_clause = MSSQL_SCORE_CLAUSES.get(sort)
    else:
        score_clause = SORT_CLAUSES.get(sort)
        filter_clause = SCORE_CLAUSES.get(sort)

    # IMPORTANT: All filters must already be applied once we reach this point
    if sort == 'tottime':
        event_list = event_list.filter(time_spent_count__gt=0)
    elif sort == 'avgtime':
        event_list = event_list.filter(time_spent_count__gt=0)
    elif sort.startswith('accel_'):
        event_list = Group.objects.get_accelerated(
            [project.id], event_list, minutes=int(sort.split('_', 1)[1]))

    if score_clause:
        event_list = event_list.extra(
            select={'sort_value': score_clause},
        )
        # HACK: don't sort by the same column twice
        if sort == 'date':
            event_list = event_list.order_by('-last_seen')
        else:
            event_list = event_list.order_by('-sort_value', '-last_seen')
        cursor = request.GET.get('cursor', request.GET.get('c'))
        if cursor:
            event_list = event_list.extra(
                where=['%s > %%s' % filter_clause],
                params=[float(cursor)],
            )

    return {
        'filters': filters,
        'event_list': event_list,
        'date_from': date_from,
        'date_to': date_to,
        'today': today,
        'sort': sort,
        'date_type': date_type
    }
# NOTE(review): fragment of a larger stream-filtering function -- its `def`
# header lies outside this view, so the code is kept byte-identical.
# The `logger.exception(..., cls, e)` call references `cls` (a variable from
# an earlier filter-construction loop) rather than the current `filter_`;
# this looks like a wrong-variable logging bug -- confirm in the full file.
event_list = event_list.filter(views=view) for filter_ in filters: try: if not filter_.is_set(): continue event_list = filter_.get_query_set(event_list) except Exception, e: logger = logging.getLogger('sentry.filters') logger.exception('Error processing filter %r: %s', cls, e) sort = request.GET.get('sort') if sort not in SORT_OPTIONS: sort = settings.DEFAULT_SORT_OPTION if sort.startswith('accel_') and not has_trending(): sort = settings.DEFAULT_SORT_OPTION engine = get_db_engine('default') if engine.startswith('sqlite'): sort_clause = SQLITE_SORT_CLAUSES.get(sort) elif engine.startswith('mysql'): sort_clause = MYSQL_SORT_CLAUSES.get(sort) else: sort_clause = SORT_CLAUSES.get(sort) if sort == 'tottime': event_list = event_list.filter(time_spent_count__gt=0) elif sort == 'avgtime': event_list = event_list.filter(time_spent_count__gt=0) elif sort.startswith('accel_'):
def setUp(self):
    """Skip this whole test case when the database lacks trend support."""
    if not has_trending():
        raise SkipTest('This database does not support trends.')
# NOTE(review): fragment of a larger stream-filtering function -- its `def`
# header lies outside this view, so the code is kept byte-identical. It
# applies date-range filters, normalizes/persists the sort choice, and
# selects per-engine SQL sort/score clauses.
groupcountbyminute__date__lte=date_to, ) elif date_from: event_list = event_list.filter(last_seen__gte=date_from) elif date_to: event_list = event_list.filter(last_seen__lte=date_to) sort = request.GET.get('sort') or request.session.get('streamsort') if sort not in SORT_OPTIONS: sort = DEFAULT_SORT_OPTION # Save last sort in session if sort != request.session.get('streamsort'): request.session['streamsort'] = sort if sort.startswith('accel_') and not has_trending(): sort = DEFAULT_SORT_OPTION engine = get_db_engine('default') if engine.startswith('sqlite'): score_clause = SQLITE_SORT_CLAUSES.get(sort) filter_clause = SQLITE_SCORE_CLAUSES.get(sort) elif engine.startswith('mysql'): score_clause = MYSQL_SORT_CLAUSES.get(sort) filter_clause = MYSQL_SCORE_CLAUSES.get(sort) elif engine.startswith('oracle'): score_clause = ORACLE_SORT_CLAUSES.get(sort) filter_clause = ORACLE_SCORE_CLAUSES.get(sort) elif engine in ('django_pytds', 'sqlserver_ado', 'sql_server.pyodbc'): score_clause = MSSQL_SORT_CLAUSES.get(sort) filter_clause = MSSQL_SCORE_CLAUSES.get(sort)
def _get_group_list(request, project):
    """Assemble the filtered and sorted group queryset for the stream.

    Applies user filters, the requested date range, and the session-persisted
    sort order; returns the queryset plus display metadata in a dict.
    """
    filters = []
    for cls in get_filters(Group, project):
        try:
            filters.append(cls(request, project))
        except Exception as e:
            logger = logging.getLogger('sentry.filters')
            logger.exception('Error initializing filter %r: %s', cls, e)

    event_list = Group.objects
    if request.user.is_authenticated() and request.GET.get('bookmarks'):
        # Bookmark mode: restrict to this user's bookmarks in the project.
        event_list = event_list.filter(
            bookmark_set__project=project,
            bookmark_set__user=request.user,
        )
    else:
        event_list = event_list.filter(project=project)

    for filter_ in filters:
        try:
            if not filter_.is_set():
                continue
            event_list = filter_.get_query_set(event_list)
        except Exception as e:
            logger = logging.getLogger('sentry.filters')
            # BUG FIX: previously logged ``cls`` (leftover from the loop
            # above) instead of the filter actually being applied here.
            logger.exception('Error processing filter %r: %s', filter_, e)

    date_from = request.GET.get('df')
    time_from = request.GET.get('tf')
    date_to = request.GET.get('dt')
    time_to = request.GET.get('tt')
    date_type = request.GET.get('date_type')

    today = timezone.now()
    # date format is Y-m-d
    if any(x is not None for x in [date_from, time_from, date_to, time_to]):
        date_from = parse_date(date_from, time_from)
        date_to = parse_date(date_to, time_to)
    else:
        # Default window: the trailing five days, open-ended.
        date_from = today - datetime.timedelta(days=5)
        date_to = None

    if date_type == 'first_seen':
        if date_from:
            event_list = event_list.filter(first_seen__gte=date_from)
        elif date_to:
            event_list = event_list.filter(first_seen__lte=date_to)
    else:
        if date_from and date_to:
            event_list = event_list.filter(
                groupcountbyminute__date__gte=date_from,
                groupcountbyminute__date__lte=date_to,
            )
        elif date_from:
            event_list = event_list.filter(last_seen__gte=date_from)
        elif date_to:
            event_list = event_list.filter(last_seen__lte=date_to)

    sort = request.GET.get('sort') or request.session.get('streamsort')
    if sort not in SORT_OPTIONS:
        sort = DEFAULT_SORT_OPTION

    # Save last sort in session
    if sort != request.session.get('streamsort'):
        request.session['streamsort'] = sort

    # Accelerated sorts require database trend support.
    if sort.startswith('accel_') and not has_trending():
        sort = DEFAULT_SORT_OPTION

    engine = get_db_engine('default')
    if engine.startswith('sqlite'):
        score_clause = SQLITE_SORT_CLAUSES.get(sort)
        filter_clause = SQLITE_SCORE_CLAUSES.get(sort)
    elif engine.startswith('mysql'):
        score_clause = MYSQL_SORT_CLAUSES.get(sort)
        filter_clause = MYSQL_SCORE_CLAUSES.get(sort)
    elif engine.startswith('oracle'):
        score_clause = ORACLE_SORT_CLAUSES.get(sort)
        filter_clause = ORACLE_SCORE_CLAUSES.get(sort)
    elif engine in ('django_pytds', 'sqlserver_ado', 'sql_server.pyodbc'):
        score_clause = MSSQL_SORT_CLAUSES.get(sort)
        filter_clause = MSSQL_SCORE_CLAUSES.get(sort)
    else:
        score_clause = SORT_CLAUSES.get(sort)
        filter_clause = SCORE_CLAUSES.get(sort)

    # IMPORTANT: All filters must already be applied once we reach this point
    if sort == 'tottime':
        event_list = event_list.filter(time_spent_count__gt=0)
    elif sort == 'avgtime':
        event_list = event_list.filter(time_spent_count__gt=0)
    elif sort.startswith('accel_'):
        event_list = Group.objects.get_accelerated(
            [project.id], event_list, minutes=int(sort.split('_', 1)[1]))

    if score_clause:
        event_list = event_list.extra(
            select={'sort_value': score_clause},
        )
        # HACK: don't sort by the same column twice
        if sort == 'date':
            event_list = event_list.order_by('-last_seen')
        else:
            event_list = event_list.order_by('-sort_value', '-last_seen')
        cursor = request.GET.get('cursor', request.GET.get('c'))
        if cursor:
            event_list = event_list.extra(
                where=['%s > %%s' % filter_clause],
                params=[float(cursor)],
            )

    return {
        'filters': filters,
        'event_list': event_list,
        'date_from': date_from,
        'date_to': date_to,
        'today': today,
        'sort': sort,
        'date_type': date_type
    }
# NOTE(review): fragment of a larger stream-filtering function -- its `def`
# header lies outside this view, so the code is kept byte-identical. It
# applies date-range filters, normalizes/persists the sort choice, and
# selects per-engine SQL sort/score clauses.
groupcountbyminute__date__gte=date_from, groupcountbyminute__date__lte=date_to ) elif date_from: event_list = event_list.filter(last_seen__gte=date_from) elif date_to: event_list = event_list.filter(last_seen__lte=date_to) sort = request.GET.get("sort") or request.session.get("streamsort") if sort not in SORT_OPTIONS: sort = DEFAULT_SORT_OPTION # Save last sort in session if sort != request.session.get("streamsort"): request.session["streamsort"] = sort if sort.startswith("accel_") and not has_trending(): sort = DEFAULT_SORT_OPTION engine = get_db_engine("default") if engine.startswith("sqlite"): score_clause = SQLITE_SORT_CLAUSES.get(sort) filter_clause = SQLITE_SCORE_CLAUSES.get(sort) elif engine.startswith("mysql"): score_clause = MYSQL_SORT_CLAUSES.get(sort) filter_clause = MYSQL_SCORE_CLAUSES.get(sort) elif engine.startswith("oracle"): score_clause = ORACLE_SORT_CLAUSES.get(sort) filter_clause = ORACLE_SCORE_CLAUSES.get(sort) elif engine in ("django_pytds", "sqlserver_ado", "sql_server.pyodbc"): score_clause = MSSQL_SORT_CLAUSES.get(sort) filter_clause = MSSQL_SCORE_CLAUSES.get(sort)