def get_chart_data(self, instance, max_days=90):
    """
    Returns up to ``max_days`` worth of hourly event counts for ``instance``,
    oldest hour first, trimming leading hours with no data (but keeping at
    least the last 24 hours).
    """
    if hasattr(instance, '_state'):
        db = instance._state.db
    else:
        db = 'default'

    if not has_charts(db):
        return []

    hours = max_days * 24
    today = timezone.now().replace(microsecond=0, second=0, minute=0)
    min_date = today - datetime.timedelta(hours=hours)

    method = self._get_date_trunc('date', db)

    chart_qs = list(instance.messagecountbyminute_set
                    .filter(date__gte=min_date)
                    .extra(select={'grouper': method}).values('grouper')
                    .annotate(num=Sum('times_seen'))
                    .values_list('grouper', 'num')
                    .order_by('grouper'))

    if not chart_qs:
        return []

    rows = dict(chart_qs)

    # just skip zeroes
    first_seen = hours
    while not rows.get(today - datetime.timedelta(hours=first_seen)) and first_seen > 24:
        first_seen -= 1

    return [rows.get(today - datetime.timedelta(hours=d), 0)
            for d in xrange(first_seen, -1, -1)]
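# Illustrative only: a minimal sketch of consuming get_chart_data()'s return
# value, which is a plain list of hourly counts ordered oldest-first. The
# `manager` and `group` names below are assumptions, not part of the code above.
def bucket_counts(counts, width=60):
    # Collapse the hourly series into at most `width` buckets so it can feed
    # a small sparkline-style chart.
    if not counts:
        return []
    bucket_size = max(1, len(counts) // width)
    return [sum(counts[i:i + bucket_size])
            for i in xrange(0, len(counts), bucket_size)]

# counts = manager.get_chart_data(group, max_days=7)
# points = bucket_counts(counts)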
def event_list(request, project):
    filters = []
    for cls in Filter.handlers.filter(Event):
        filters.append(cls(request))

    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    event_list = Event.objects.filter(project=project).order_by('-datetime')

    # TODO: implement separate API for messages
    for filter_ in filters:
        if not filter_.is_set():
            continue
        event_list = filter_.get_query_set(event_list)

    offset = (page - 1) * settings.MESSAGES_PER_PAGE
    limit = page * settings.MESSAGES_PER_PAGE

    today = timezone.now()

    has_realtime = False

    return render_to_response('sentry/events/event_list.html', {
        'project': project,
        'has_realtime': has_realtime,
        'event_list': event_list[offset:limit],
        'today': today,
        'filters': filters,
    }, request)
def timesince(value):
    from django.template.defaultfilters import timesince
    if not value:
        return _('Never')
    if value < timezone.now() - datetime.timedelta(days=5):
        return value.date()
    # keep only the leading unit from Django's verbose output
    # (e.g. "3 hours, 2 minutes" becomes "3 hours")
    value = (' '.join(timesince(value).split(' ')[0:2])).strip(',')
    if value == _('0 minutes'):
        return _('Just now')
    if value == _('1 day'):
        return _('Yesterday')
    return value + _(' ago')
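# Illustrative only: how a filter like this is typically wired up in a Django
# templatetags module. The `register` library and module placement are
# assumptions, not shown in the snippet above.
from django import template

register = template.Library()
register.filter('timesince', timesince)

# In a template: {{ event.last_seen|timesince }} then renders strings such as
# "Just now", "Yesterday" or "3 hours ago".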
def from_kwargs(self, project, **kwargs):
    # TODO: this function is way too damn long and needs to be refactored;
    # the inner imports also suck, so let's try to move it away from
    # the objects manager
    from sentry.models import Event, Project, View
    from sentry.views import View as ViewHandler

    project = Project.objects.get(pk=project)

    if any(k in kwargs for k in ('view', 'message_id')):
        # we must be passing legacy data, let's convert it
        kwargs = self.convert_legacy_kwargs(kwargs)

    # First we pull out our top-level (non-data attr) kwargs
    event_id = kwargs.pop('event_id', None)
    message = kwargs.pop('message', None)
    culprit = kwargs.pop('culprit', None)
    level = kwargs.pop('level', None) or logging.ERROR
    time_spent = kwargs.pop('time_spent', None)
    logger_name = kwargs.pop('logger', 'root')
    server_name = kwargs.pop('server_name', None)
    site = kwargs.pop('site', None)
    date = kwargs.pop('timestamp', None) or timezone.now()
    extra = kwargs.pop('extra', None)
    modules = kwargs.pop('modules', None)

    if timezone.is_naive(date):
        date = timezone.make_aware(date, timezone.utc)

    if not message:
        raise InvalidData('Missing required parameter: message')

    checksum = kwargs.pop('checksum', None)
    if not checksum:
        checksum = hashlib.md5(message).hexdigest()

    # anything left over must be a dotted interface name mapping to its data
    data = kwargs
    for k, v in kwargs.iteritems():
        if '.' not in k:
            raise InvalidInterface('%r is not a valid interface name' % k)
        try:
            interface = self.module_cache[k]
        except ImportError, e:
            raise InvalidInterface('%r is not a valid interface name: %s' % (k, e))
        try:
            data[k] = interface(**v).serialize()
        except Exception, e:
            raise InvalidData('Unable to validate interface, %r: %s' % (k, e))
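# Illustrative only: a minimal call into from_kwargs() above. The
# `Group.objects` access path is an assumption (the TODO says the method lives
# on an objects manager), and the project pk and message are placeholder values.
# Any extra kwarg without a dot in its name would raise InvalidInterface.
import datetime
import logging

Group.objects.from_kwargs(
    1,  # placeholder project pk; resolved via Project.objects.get(pk=...)
    message='ValueError: invalid literal for int() with base 10',
    level=logging.ERROR,
    logger='root',
    timestamp=datetime.datetime(2012, 1, 1, 12, 0),  # naive; made aware as UTC
)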
def group_list(request, project, view_id=None):
    try:
        page = int(request.GET.get('p', 1))
    except (TypeError, ValueError):
        page = 1

    if view_id:
        try:
            view = View.objects.get(pk=view_id)
        except View.DoesNotExist:
            return HttpResponseRedirect(reverse('sentry', args=[project.pk]))
    else:
        view = None

    filters, event_list = _get_group_list(
        request=request,
        project=project,
        view=view,
    )

    # XXX: this is duplicated in _get_group_list
    sort = request.GET.get('sort')
    if sort not in SORT_OPTIONS:
        sort = DEFAULT_SORT_OPTION
    sort_label = _get_sort_label(sort)

    today = timezone.now()

    has_realtime = page == 1

    return render_to_response('sentry/groups/group_list.html', {
        'project': project,
        'has_realtime': has_realtime,
        'event_list': event_list,
        'today': today,
        'sort': sort,
        'sort_label': sort_label,
        'filters': filters,
        'view': view,
        'HAS_TRENDING': has_trending(),
    }, request)
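# Illustrative only: group_list() expects SORT_OPTIONS / DEFAULT_SORT_OPTION
# style constants mapping a query-string value to a human-readable label, plus
# a _get_sort_label() helper. The keys and labels below are a sketch, not the
# project's actual definitions.
SORT_OPTIONS = {
    'date': 'Last Seen',
    'new': 'First Seen',
    'events': 'Number of Events',
}
DEFAULT_SORT_OPTION = 'date'

def _get_sort_label(sort):
    # fall back to the raw key if it is unknown
    return SORT_OPTIONS.get(sort, sort)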
def cleanup(days=30, logger=None, site=None, server=None, level=None,
            project=None):
    """
    Deletes a portion of the trailing data in Sentry based on its creation
    date. For example, if ``days`` is 30, this would attempt to clean up all
    data that's older than 30 days.

    :param logger: limit all deletion scopes to messages from the specified
                   logger.
    :param site: limit the message deletion scope to the specified site.
    :param server: limit the message deletion scope to the specified server.
    :param level: limit all deletion scopes to messages that are greater than
                  or equal to level.
    """
    import datetime

    from sentry.models import Group, Event, MessageCountByMinute, \
        MessageFilterValue, FilterValue
    from sentry.utils import timezone
    from sentry.utils.query import RangeQuerySetWrapper, SkinnyQuerySet

    # TODO: we should collect which messages above were deleted
    # and potentially just send out post_delete signals where
    # GroupedMessage can update itself accordingly
    ts = timezone.now() - datetime.timedelta(days=days)

    # Message
    qs = SkinnyQuerySet(Event).filter(datetime__lte=ts)
    if logger:
        qs = qs.filter(logger=logger)
    if site:
        qs = qs.filter(site=site)
    if server:
        qs = qs.filter(server_name=server)
    if level:
        qs = qs.filter(level__gte=level)
    if project:
        qs = qs.filter(project=project)

    groups_to_check = set()
    for obj in RangeQuerySetWrapper(qs):
        print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk)
        obj.delete()
        groups_to_check.add(obj.group_id)

    if not (server or site):
        # MessageCountByMinute
        qs = SkinnyQuerySet(MessageCountByMinute).filter(date__lte=ts)
        if logger:
            qs = qs.filter(group__logger=logger)
        if level:
            qs = qs.filter(group__level__gte=level)
        if project:
            qs = qs.filter(project=project)

        for obj in RangeQuerySetWrapper(qs):
            print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk)
            obj.delete()

        # GroupedMessage
        qs = SkinnyQuerySet(Group).filter(last_seen__lte=ts)
        if logger:
            qs = qs.filter(logger=logger)
        if level:
            qs = qs.filter(level__gte=level)
        if project:
            qs = qs.filter(project=project)

        for obj in RangeQuerySetWrapper(qs):
            for key, value in SkinnyQuerySet(MessageFilterValue).filter(
                    group=obj).values_list('key', 'value'):
                if not MessageFilterValue.objects.filter(
                        key=key, value=value).exclude(group=obj).exists():
                    print ">>> Removing <FilterValue: key=%s, value=%s>" % (key, value)
                    FilterValue.objects.filter(key=key, value=value).delete()
            print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk)
            obj.delete()

    # attempt to cleanup any groups that may now be empty
    groups_to_delete = []
    for group_id in groups_to_check:
        if not Event.objects.filter(group=group_id).exists():
            groups_to_delete.append(group_id)

    if groups_to_delete:
        for obj in SkinnyQuerySet(Group).filter(pk__in=groups_to_delete):
            for key, value in SkinnyQuerySet(MessageFilterValue).filter(
                    group=obj).values_list('key', 'value'):
                if not MessageFilterValue.objects.filter(
                        key=key, value=value).exclude(group=obj).exists():
                    print ">>> Removing <FilterValue: key=%s, value=%s>" % (key, value)
                    FilterValue.objects.filter(key=key, value=value).delete()
            print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk)
            obj.delete()
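# Illustrative only: invoking the cleanup task directly. The keyword arguments
# mirror the signature above; the retention window and logger name are made-up
# values for the example.
import logging

cleanup(days=7, logger='django.request', level=logging.WARNING)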
command = getattr(commands, args[1])

for option in getattr(command, 'options', []):
    parser.add_option(option)

(options, args) = parser.parse_args()

config_path = options.config

# We hardcode skipping this check via init
if not os.path.exists(config_path):
    raise ValueError("Configuration file does not exist. Use 'init' to initialize the file.")

environment['config'] = config_path
environment['start_date'] = timezone.now()

settings_from_file(config_path)

# set debug
if getattr(options, 'debug', False):
    django_settings.DEBUG = True

# filter out reserved options
kwargs = dict((k, v) for k, v in options.__dict__.iteritems() if k != 'config')

# execute command
if getattr(command, 'consume_args', False):
    command(args, **kwargs)
else:
    command(**kwargs)
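# Illustrative only: a command object the dispatcher above could run. The
# `options` and `consume_args` attributes are the only contract it relies on;
# the command name and its flag are invented for this sketch.
from optparse import make_option

def upgrade(interactive=True, **kwargs):
    # perform the work; all other parsed options arrive as keyword arguments
    pass

upgrade.options = [
    make_option('--noinput', action='store_false', dest='interactive',
                default=True, help='Do not prompt for input.'),
]
upgrade.consume_args = False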