def check_alerts(**kwargs):
    """
    Iterates all current keys and fires additional tasks to check each
    individual project's alert settings.
    """
    from sentry.models import ProjectCountByMinute
    from sentry.utils.queue import maybe_delay

    current_time = timezone.now()

    # we want at least a 60 second window of events
    window_end = current_time - timedelta(minutes=1)
    window_start = window_end - timedelta(minutes=MINUTE_NORMALIZATION)

    # find each project which has data for the last interval
    # TODO: we could force more work on the db by eliminating ones which
    # don't have the full aggregate we need
    counts = ProjectCountByMinute.objects.filter(
        date__lte=window_end,
        date__gt=window_start,
        times_seen__gt=0,
    ).values_list('project_id', 'date', 'times_seen')

    for project_id, date, times_seen in counts:
        # normalize the raw count to an approximate per-minute rate over
        # the elapsed window
        elapsed_minutes = (current_time - date).seconds / 60
        maybe_delay(check_project_alerts,
            project_id=project_id,
            when=window_end,
            count=int(times_seen / elapsed_minutes),
            expires=120,
        )
def post_process_group(group, **kwargs):
    """
    Fires post processing hooks for a group.

    Queues one ``plugin_post_process_group`` task per plugin that reports
    itself enabled for the group's project.
    """
    for plugin in plugins.all():
        # safe_execute shields us from a misbehaving plugin's is_enabled()
        enabled = safe_execute(plugin.is_enabled, group.project)
        if enabled:
            maybe_delay(
                plugin_post_process_group,
                plugin.slug, group=group, **kwargs)
for view in views: group.views.add(view) # save the event unless its been sampled if not is_sample: try: event.save() except IntegrityError: transaction.rollback_unless_managed(using=group._state.db) return event transaction.commit_unless_managed(using=group._state.db) if settings.USE_SEARCH: try: maybe_delay(index_event, event) except Exception, e: transaction.rollback_unless_managed(using=group._state.db) logger.exception(u'Error indexing document: %s', e) if is_new: try: regression_signal.send(sender=self.model, instance=group) except Exception, e: transaction.rollback_unless_managed(using=group._state.db) logger.exception(u'Error sending regression signal: %s', e) send_group_processors(group=group, event=event, is_new=is_new, is_sample=is_sample) return event
def insert_data_to_database(data):
    """
    Queues a raw event payload for asynchronous storage via the
    ``store_event`` task.
    """
    maybe_delay(store_event, data=data)
event.group = group # save the event unless its been sampled if not is_sample: try: event.save() except IntegrityError: transaction.rollback_unless_managed(using=group._state.db) return event transaction.commit_unless_managed(using=group._state.db) if settings.USE_SEARCH: try: maybe_delay(index_event, event) except Exception, e: transaction.rollback_unless_managed(using=group._state.db) logger.exception(u'Error indexing document: %s', e) if settings.SCRAPE_JAVASCRIPT_CONTEXT and event.platform == 'javascript' and not is_sample: try: maybe_delay(fetch_javascript_source, event) except Exception, e: transaction.rollback_unless_managed(using=group._state.db) logger.exception(u'Error fetching javascript source: %s', e) if is_new: try: regression_signal.send_robust(sender=self.model, instance=group) except Exception, e:
def send_group_processors(group, **kwargs):
    """
    Queues the ``post_process_group`` task for *group*, forwarding any
    extra keyword arguments to it.
    """
    maybe_delay(post_process_group, group=group, **kwargs)
event.group = group # save the event unless its been sampled if not is_sample: try: event.save() except IntegrityError: transaction.rollback_unless_managed(using=group._state.db) return event transaction.commit_unless_managed(using=group._state.db) if settings.USE_SEARCH: try: maybe_delay(index_event, event) except Exception, e: transaction.rollback_unless_managed(using=group._state.db) logger.exception(u'Error indexing document: %s', e) if settings.SCRAPE_JAVASCRIPT_CONTEXT and event.platform == 'javascript' and not is_sample: try: maybe_delay(fetch_javascript_source, event) except Exception, e: transaction.rollback_unless_managed(using=group._state.db) logger.exception(u'Error fetching javascript source: %s', e) if is_new: try: regression_signal.send_robust(sender=self.model, instance=group)
def incr(self, model, columns, filters, extra=None):
    """
    Queues a buffered increment of *columns* on the rows of *model*
    matched by *filters*.

    >>> incr(Group, columns={'times_seen': 1}, filters={'pk': group.pk})
    """
    maybe_delay(
        process_incr,
        model=model,
        columns=columns,
        filters=filters,
        extra=extra,
    )