def test(self):
    """Stats from bulk_get_incident_stats match the per-incident helpers."""
    incident_closed = create_incident(
        self.organization,
        IncidentType.CREATED,
        "Closed",
        "",
        groups=[self.group],
        date_started=timezone.now() - timedelta(days=30),
    )
    update_incident_status(incident_closed, IncidentStatus.CLOSED)
    incident_open = create_incident(
        self.organization,
        IncidentType.CREATED,
        "Open",
        "",
        groups=[self.group],
        date_started=timezone.now() - timedelta(days=30),
    )
    all_incidents = [incident_closed, incident_open]
    for incident, stats in zip(all_incidents, bulk_get_incident_stats(all_incidents)):
        # Event stats must agree with the single-incident query.
        expected_event_stats = get_incident_event_stats(incident)
        assert stats["event_stats"].data["data"] == expected_event_stats.data["data"]
        assert stats["event_stats"].start == expected_event_stats.start
        assert stats["event_stats"].end == expected_event_stats.end
        assert stats["event_stats"].rollup == expected_event_stats.rollup
        # Aggregate counts must agree as well.
        expected_aggregates = get_incident_aggregates(incident)
        assert stats["total_events"] == expected_aggregates["count"]
        assert stats["unique_users"] == expected_aggregates["unique_users"]
def get(self, request, organization, incident):
    """
    Fetch total event counts, unique user counts and trend graph for an Incident.
    ``````````````````
    :auth: required
    """
    # Bulk helper is called with a single incident; take its only result.
    incident_stats = bulk_get_incident_stats([incident], windowed_stats=True)[0]
    serializer = SnubaTSResultSerializer(organization, None, request.user)
    return Response(
        {
            "eventStats": serializer.serialize(incident_stats["event_stats"]),
            "totalEvents": incident_stats["total_events"],
            "uniqueUsers": incident_stats["unique_users"],
        }
    )
def get_attrs(self, item_list, user, **kwargs):
    """Collect project slugs and bulk stats for each incident being serialized."""
    # Group related project slugs by incident id in a single query.
    projects_by_incident = defaultdict(list)
    related = IncidentProject.objects.filter(incident__in=item_list).select_related(
        "project"
    )
    for incident_project in related:
        projects_by_incident[incident_project.incident_id].append(
            incident_project.project.slug
        )
    # bulk_get_incident_stats returns results in the same order as item_list.
    return {
        incident: {
            "projects": projects_by_incident.get(incident.id, []),
            "event_stats": stats["event_stats"],
            "total_events": stats["total_events"],
            "unique_users": stats["unique_users"],
        }
        for incident, stats in zip(item_list, bulk_get_incident_stats(item_list))
    }
def test(self):
    """Bulk stats match per-incident helpers; only the first incident's
    expected window is widened by the prewindow adjustment."""
    incident_closed = create_incident(
        self.organization,
        IncidentType.ALERT_TRIGGERED,
        "Closed",
        "",
        QueryAggregations.TOTAL,
        groups=[self.group],
        date_started=timezone.now() - timedelta(days=30),
    )
    update_incident_status(incident_closed, IncidentStatus.CLOSED)
    incident_open = create_incident(
        self.organization,
        IncidentType.ALERT_TRIGGERED,
        "Open",
        "",
        QueryAggregations.TOTAL,
        groups=[self.group],
        date_started=timezone.now() - timedelta(days=30),
    )
    all_incidents = [incident_closed, incident_open]
    bulk_stats = bulk_get_incident_stats(all_incidents)
    for idx, (incident, stats) in enumerate(zip(all_incidents, bulk_stats)):
        event_stats = get_incident_event_stats(incident)
        assert stats["event_stats"].data["data"] == event_stats.data["data"]
        expected_start = stats["event_stats"].start
        expected_end = stats["event_stats"].end
        # Prewindow is applied only for the first (closed) incident; the
        # original code expressed this with a one-shot boolean flag.
        if idx == 0:
            expected_start = expected_start - calculate_incident_prewindow(
                expected_start, expected_end, incident
            )
        assert event_stats.start == expected_start
        assert event_stats.end == expected_end
        assert stats["event_stats"].rollup == event_stats.rollup
        aggregates = get_incident_aggregates(incident)
        assert stats["total_events"] == aggregates["count"]
        assert stats["unique_users"] == aggregates["unique_users"]