def _publish_to_kafka(self, request):
    """
    Sends raw event data to Kafka for later offline processing.

    Best-effort: oversized or unsampled events are skipped, and any
    failure is swallowed (logged at debug) so event intake never breaks.
    """
    try:
        # This may fail when we e.g. send a multipart form. We ignore those errors for now.
        data = request.body

        if not data or len(data) > options.get('kafka-publisher.max-event-size'):
            return

        # Sampling
        if random.random() >= options.get('kafka-publisher.raw-event-sample-rate'):
            return

        # We want to send only serializable items from request.META
        meta = {}
        for key, value in request.META.items():
            try:
                json.dumps([key, value])
                meta[key] = value
            except (TypeError, ValueError):
                pass

        meta['SENTRY_API_VIEW_NAME'] = self.__class__.__name__

        kafka_publisher.publish(
            channel=getattr(settings, 'KAFKA_RAW_EVENTS_PUBLISHER_TOPIC', 'raw-store-events'),
            value=json.dumps([meta, base64.b64encode(data)])
        )
    except Exception as e:
        # BUG FIX: ``Exception.message`` only exists on Python 2 and would
        # itself raise AttributeError here on Python 3; format the exception
        # object directly instead.
        logger.debug("Cannot publish event to Kafka: {}".format(e))
def open_resolve_dialog(self, data, group, integration):
    """Open a Slack dialog asking the user how to resolve *group*.

    Posts to Slack's ``dialog.open`` using the action payload *data*
    (must contain ``response_url`` and ``trigger_id``) and the
    integration's stored access token. Failures are logged, not raised.
    """
    # XXX(epurkhiser): In order to update the original message we have to
    # keep track of the response_url in the callback_id. Definitely hacky,
    # but seems like there's no other solutions [1]:
    #
    # [1]: https://stackoverflow.com/questions/46629852/update-a-bot-message-after-responding-to-a-slack-dialog#comment80795670_46629852
    callback_id = json.dumps({
        'issue': group.id,
        'orig_response_url': data['response_url'],
        'is_message': self.is_message(data),
    })

    dialog = {
        'callback_id': callback_id,
        'title': u'Resolve Issue',
        'submit_label': 'Resolve',
        'elements': [RESOLVE_SELECTOR],
    }

    payload = {
        'dialog': json.dumps(dialog),
        'trigger_id': data['trigger_id'],
        'token': integration.metadata['access_token'],
    }

    session = http.build_session()
    req = session.post('https://slack.com/api/dialog.open', data=payload)
    resp = req.json()
    # Slack returns 200 with {"ok": false} on logical errors, so check the body.
    if not resp.get('ok'):
        logger.error('slack.action.response-error', extra={'response': resp})
def get(self, request, organization, team, project, rule_id=None):
    """Render the rule create/edit page.

    Loads an existing rule when *rule_id* is given (redirecting back to
    the rule list if it doesn't exist), otherwise starts a blank rule,
    and passes the registered condition/action form nodes as JSON.
    """
    if rule_id:
        try:
            rule = Rule.objects.get(project=project, id=rule_id)
        except Rule.DoesNotExist:
            # Unknown rule id: bounce back to the project's rule list.
            path = reverse('sentry-project-rules', args=[organization.slug, project.slug])
            return self.redirect(path)
    else:
        rule = Rule(project=project)

    action_list = []
    condition_list = []

    # TODO: conditions need to be based on actions
    for rule_type, rule_cls in rules:
        node = rule_cls(project)
        context = {
            'id': node.id,
            'label': node.label,
            'html': node.render_form(),
        }

        # The registry key prefix decides which UI bucket a node lands in.
        if rule_type.startswith('condition/'):
            condition_list.append(context)
        elif rule_type.startswith('action/'):
            action_list.append(context)

    context = {
        'rule': rule,
        'page': 'rules',
        'action_list': json.dumps(action_list),
        'condition_list': json.dumps(condition_list),
    }

    return self.respond('sentry/projects/rules/new.html', context)
def process_service_hook(servicehook_id, event, **kwargs):
    """Deliver *event* to the service hook identified by *servicehook_id*.

    Silently no-ops if the hook has been deleted. Only payload version 0
    is implemented. Increments the servicehook_fired TSDB counter before
    delivery.
    """
    try:
        servicehook = ServiceHook.objects.get(id=servicehook_id)
    except ServiceHook.DoesNotExist:
        return

    if servicehook.version == 0:
        payload = get_payload_v0(event)
    else:
        raise NotImplementedError

    from sentry import tsdb
    tsdb.incr(tsdb.models.servicehook_fired, servicehook.id)

    # Serialize exactly once so the signature is computed over the same
    # bytes we actually send -- two independent json.dumps() calls could
    # differ (e.g. dict ordering on older Pythons) and break verification.
    body = json.dumps(payload)

    headers = {
        'Content-Type': 'application/json',
        'X-ServiceHook-Timestamp': six.text_type(int(time())),
        'X-ServiceHook-GUID': servicehook.guid,
        'X-ServiceHook-Signature': servicehook.build_signature(body),
    }

    safe_urlopen(
        url=servicehook.url,
        data=body,
        headers=headers,
        timeout=5,
        # NOTE(review): TLS verification is disabled for hook delivery --
        # confirm this is intentional for customer-supplied URLs.
        verify_ssl=False,
    )
def dispatch(self, request):
    """Serve the crash-report embed and accept user-report submissions.

    Validates eventId, project key, and origin before doing anything;
    GET returns the embeddable JS, POST saves a UserReport (attaching
    the Group when the event mapping already exists).
    """
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._json_response(request, status=400)

    key = self._get_project_key(request)
    if not key:
        return self._json_response(request, status=404)

    origin = self._get_origin(request)
    if not origin:
        return self._json_response(request, status=403)

    if not is_valid_origin(origin, key.project):
        return HttpResponse(status=403)

    # CORS preflight: respond without a body.
    if request.method == 'OPTIONS':
        return self._json_response(request)

    # TODO(dcramer): since we cant use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }

    form = UserReportForm(request.POST if request.method == 'POST' else None, initial=initial)
    if form.is_valid():
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id
        try:
            report.group = Group.objects.get(
                eventmapping__event_id=report.event_id,
                eventmapping__project=key.project,
            )
        except Group.DoesNotExist:
            # XXX(dcramer): the system should fill this in later
            pass
        report.save()
        return HttpResponse(status=200)
    elif request.method == 'POST':
        # Invalid submission: echo the form errors back as JSON.
        return self._json_response(request, {
            "errors": dict(form.errors),
        }, status=400)

    # GET (or unbound form): render the form HTML and embed it, JSON-escaped,
    # into the JS loader template.
    template = render_to_string('sentry/error-page-embed.html', {
        'form': form,
    })

    context = {
        'endpoint': mark_safe(json.dumps(request.get_full_path())),
        'template': mark_safe(json.dumps(template)),
    }

    return render_to_response('sentry/error-page-embed.js', context, request, content_type='text/javascript')
def test_escape(self):
    # NOTE: this exercises the project's json wrapper (not stdlib json):
    # ``escape=True`` and ``dumps_htmlsafe`` unicode-escape HTML-sensitive
    # characters (<, >, ') so the output can be embedded in markup safely.
    res = "<script>alert('&');</script>"
    # Plain dumps leaves HTML characters untouched.
    assert json.dumps(res) == '"<script>alert(\'&\');</script>"'
    assert json.dumps(
        res, escape=True
    ) == '"\\u003cscript\\u003ealert(\\u0027\u0026\\u0027);\\u003c/script\\u003e"'
    # dumps_htmlsafe is expected to behave exactly like escape=True.
    assert json.dumps_htmlsafe(
        res
    ) == '"\\u003cscript\\u003ealert(\\u0027\u0026\\u0027);\\u003c/script\\u003e"'
def test_valid_member_selected(self):
    """A '@name' channel that is not a channel/group but IS a member validates.

    Mocks the three Slack list endpoints the form consults in order:
    channels.list, groups.list, then users.list (where '@morty' matches).
    """
    rule = self.get_rule(data={
        'workspace': self.integration.id,
        'channel': '@morty',
        'tags': '',
    })

    # No channel named 'morty' -- lookup must fall through.
    channels = {
        'ok': 'true',
        'channels': [
            {'name': 'my-channel', 'id': 'chan-id'},
            {'name': 'other-chann', 'id': 'chan-id'},
        ],
    }
    responses.add(
        method=responses.GET,
        url='https://slack.com/api/channels.list',
        status=200,
        content_type='application/json',
        body=json.dumps(channels),
    )

    # No private channel named 'morty' either.
    groups = {
        'ok': 'true',
        'groups': [
            {'name': 'my-private-channel', 'id': 'chan-id'},
        ],
    }
    responses.add(
        method=responses.GET,
        url='https://slack.com/api/groups.list',
        status=200,
        content_type='application/json',
        body=json.dumps(groups),
    )

    # 'morty' exists as a workspace member.
    members = {
        'ok': 'true',
        'members': [
            {'name': 'morty', 'id': 'morty-id'},
            {'name': 'other-user', 'id': 'user-id'},
        ],
    }
    responses.add(
        method=responses.GET,
        url='https://slack.com/api/users.list',
        status=200,
        content_type='application/json',
        body=json.dumps(members),
    )

    form = rule.get_form_instance()
    assert form.is_valid()
def to_python(cls, data):
    """Build a Message interface instance from raw event ``data``.

    Requires ``data['message']``; non-string messages, params and
    formatted values are coerced to JSON strings. When ``formatted`` is
    absent it is derived from printf-style (``%s``) or ``{}`` params,
    and dropped again if identical to ``message``.

    Raises:
        InterfaceValidationError: if no message is present.
    """
    if not data.get('message'):
        raise InterfaceValidationError("No 'message' present")

    # TODO(dcramer): some day we should stop people from sending arbitrary
    # crap to the server
    if not isinstance(data['message'], six.string_types):
        data['message'] = json.dumps(data['message'])

    kwargs = {
        'message': trim(data['message'], settings.SENTRY_MAX_MESSAGE_LENGTH),
        'formatted': data.get('formatted'),
    }

    if data.get('params'):
        kwargs['params'] = trim(data['params'], 1024)
    else:
        kwargs['params'] = ()

    if kwargs['formatted']:
        if not isinstance(kwargs['formatted'], six.string_types):
            # BUG FIX: previously the serialized value was written back to
            # ``data['formatted']`` only, leaving the non-string original in
            # ``kwargs`` and breaking the dedupe comparison below.
            kwargs['formatted'] = json.dumps(kwargs['formatted'])
    # support python-esque formatting (e.g. %s)
    elif '%' in kwargs['message'] and kwargs['params']:
        if isinstance(kwargs['params'], list):
            kwargs['params'] = tuple(kwargs['params'])
        try:
            kwargs['formatted'] = trim(
                kwargs['message'] % kwargs['params'],
                settings.SENTRY_MAX_MESSAGE_LENGTH,
            )
        except Exception:
            pass
    # support very basic placeholder formatters (non-typed)
    elif '{}' in kwargs['message'] and kwargs['params']:
        try:
            kwargs['formatted'] = trim(
                kwargs['message'].format(kwargs['params']),
                settings.SENTRY_MAX_MESSAGE_LENGTH,
            )
        except Exception:
            pass

    # don't wastefully store formatted message twice
    if kwargs['formatted'] == kwargs['message']:
        kwargs['formatted'] = None

    return cls(**kwargs)
def get_api_context(self, is_public=False):
    """Return a serializable dict describing this HTTP-request interface.

    Public contexts expose nothing. Otherwise the request body is
    JSON-encoded when it's a dict, and dict-shaped cookies/headers are
    flattened into sorted key/value pairs.
    """
    if is_public:
        return {}

    body = self.data
    if isinstance(body, dict):
        body = json.dumps(body)

    cookies = self.cookies or ()
    if isinstance(cookies, dict):
        cookies = sorted(self.cookies.items())

    headers = self.headers or ()
    if isinstance(headers, dict):
        headers = sorted(self.headers.items())

    return {
        'method': self.method,
        'url': self.url,
        'query': self.query_string,
        'fragment': self.fragment,
        'data': body,
        'headers': headers,
        'cookies': cookies,
        'env': self.env or None,
    }
def set(self, key, value, timeout, version=None):
    """Store *value* (JSON-encoded) under *key*.

    A truthy *timeout* (seconds) uses SETEX so the key expires;
    otherwise the key is stored without expiry.
    """
    full_key = self.make_key(key, version=version)
    payload = json.dumps(value)
    if not timeout:
        self.client.set(full_key, payload)
    else:
        self.client.setex(full_key, int(timeout), payload)
def resolve(request, project):
    """AJAX endpoint: mark a group resolved and return its re-rendered row."""
    gid = request.REQUEST.get('gid')
    if not gid:
        return HttpResponseForbidden()

    try:
        group = Group.objects.get(pk=gid)
    except Group.DoesNotExist:
        return HttpResponseForbidden()

    # Permission check: the user must have access to the group's project.
    if group.project and group.project.pk not in get_project_list(request.user):
        return HttpResponseForbidden()

    # status=1 == resolved; update both the DB row and the local instance
    # so the rendered partial reflects the new state.
    Group.objects.filter(pk=group.pk).update(status=1)
    group.status = 1

    data = [
        (m.pk, {
            'html': render_to_string('sentry/partial/_group.html', {
                'group': m,
                'request': request,
            }).strip(),
            'count': m.times_seen,
        }) for m in [group]]

    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def test_post_success(self, process):
    """A well-formed Expect-CT report posted with the proper content type
    is accepted with a 201."""
    process.return_value = 'ok'
    resp = self.client.post(
        self.path,
        content_type='application/expect-ct-report+json',
        data=json.dumps({
            "expect-ct-report": {
                "date-time": "2014-04-06T13:00:50Z",
                "hostname": "www.example.com",
                "port": 443,
                "effective-expiration-date": "2014-05-01T12:40:50Z",
                "served-certificate-chain": ["-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----"],
                "validated-certificate-chain": ["-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----"],
                "scts": [
                    {
                        "version": 1,
                        "status": "invalid",
                        "source": "embedded",
                        "serialized_sct": "ABCD=="
                    },
                ],
            }
        }),
        HTTP_USER_AGENT='awesome',
    )
    assert resp.status_code == 201, resp.content
def test_no_actor(self):
    """Without an explicit actor the event falls back to the 'sentry'
    application actor, and the signature header matches the body."""
    result = AppPlatformEvent(
        resource='event_alert',
        action='triggered',
        install=self.install,
        data={},
    )

    assert result.body == {
        'action': 'triggered',
        'installation': {
            'uuid': self.install.uuid,
        },
        'data': {},
        'actor': {
            'type': 'application',
            'id': 'sentry',
            'name': 'Sentry',
        }
    }

    # The signature must be computed over the serialized body.
    body = json.dumps(result.body)
    signature = self.sentry_app.build_signature(body)

    assert result.headers['Content-Type'] == 'application/json'
    assert result.headers['Sentry-Hook-Resource'] == 'event_alert'
    assert result.headers['Sentry-Hook-Signature'] == signature
def serialize(context, obj):
    """Template helper: serialize *obj* as JSON for the current user.

    Uses the request's user when one is in the template context so
    per-user fields are respected, and marks the result safe for
    direct embedding in templates.
    """
    user = context['request'].user if 'request' in context else None
    return mark_safe(json.dumps(serialize_func(obj, user)))
def chart(request, team=None, project=None):
    """Return chart (time-series) data as JSON.

    Scope resolution order: explicit group id (``gid`` param), then
    project, then team, falling back to all of the user's projects.
    Team- and user-wide aggregates are cached for 5 minutes.
    """
    gid = request.REQUEST.get('gid')
    days = int(request.REQUEST.get('days', '90'))

    if gid:
        try:
            group = Group.objects.get(pk=gid)
        except Group.DoesNotExist:
            return HttpResponseForbidden()
        data = Group.objects.get_chart_data(group, max_days=days)
    elif project:
        data = Project.objects.get_chart_data(project, max_days=days)
    elif team:
        # Aggregating across a whole team is expensive; cache it.
        cache_key = 'api.chart:team=%s,days=%s' % (team.id, days)
        data = cache.get(cache_key)
        if data is None:
            project_list = list(Project.objects.filter(team=team))
            data = Project.objects.get_chart_data_for_group(project_list, max_days=days)
            cache.set(cache_key, data, 300)
    else:
        cache_key = 'api.chart:user=%s,days=%s' % (request.user.id, days)
        data = cache.get(cache_key)
        if data is None:
            project_list = Project.objects.get_for_user(request.user)
            data = Project.objects.get_chart_data_for_group(project_list, max_days=days)
            cache.set(cache_key, data, 300)

    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def get_stats(request, team=None, project=None):
    """Return event and resolution counts over the trailing window.

    Window length comes from the ``minutes`` query param (default 15).
    Scope is the single project when given without a team, otherwise
    all projects the user can see (optionally narrowed to *team*).
    """
    minutes = int(request.REQUEST.get('minutes', 15))

    if not team and project:
        project_list = [project]
    else:
        project_list = Project.objects.get_for_user(request.user, team=team)

    cutoff = datetime.timedelta(minutes=minutes)
    cutoff_dt = timezone.now() - cutoff

    num_events = ProjectCountByMinute.objects.filter(
        project__in=project_list,
        date__gte=cutoff_dt,
    ).aggregate(t=Sum('times_seen'))['t'] or 0

    # XXX: This is too slow if large amounts of groups are resolved
    num_resolved = Group.objects.filter(
        project__in=project_list,
        status=STATUS_RESOLVED,
        resolved_at__gte=cutoff_dt,
    ).aggregate(t=Sum('times_seen'))['t'] or 0

    data = {
        'events': num_events,
        'resolved': num_resolved,
    }

    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def get_group_trends(request, project=None):
    """Return trending (highest-score) unresolved groups as JSON.

    Uses the accelerated trending query when available, otherwise falls
    back to groups seen within the trailing ``minutes`` window. ``limit``
    is capped at 100.
    """
    minutes = int(request.REQUEST.get("minutes", 15))
    limit = min(100, int(request.REQUEST.get("limit", 10)))

    if project:
        project_dict = {project.pk: project}
    else:
        project_dict = get_project_list(request.user)

    # status=0 == unresolved.
    base_qs = Group.objects.filter(project__in=project_dict.keys(), status=0).order_by("-score")

    if has_trending():
        group_list = list(Group.objects.get_accelerated(base_qs, minutes=(minutes))[:limit])
    else:
        cutoff = datetime.timedelta(minutes=minutes)
        cutoff_dt = timezone.now() - cutoff

        group_list = list(base_qs.filter(last_seen__gte=cutoff_dt)[:limit])

    # Pre-populate each group's project to avoid per-row lookups downstream.
    for group in group_list:
        group._project_cache = project_dict.get(group.project_id)

    data = transform_groups(request, group_list, template="sentry/partial/_group_small.html")

    response = HttpResponse(json.dumps(data))
    response["Content-Type"] = "application/json"
    return response
def get_resolved_groups(request, team=None, project=None):
    """Return groups resolved within the trailing window as JSON.

    Scope: the team's projects, a single project, or all of the user's
    projects, in that priority order. ``limit`` is capped at 100.
    """
    minutes = int(request.REQUEST.get('minutes', 15))
    limit = min(100, int(request.REQUEST.get('limit', 10)))

    if team:
        project_list = list(Project.objects.filter(team=team))
    elif project:
        project_list = [project]
    else:
        project_list = get_project_list(request.user).values()

    cutoff = datetime.timedelta(minutes=minutes)
    cutoff_dt = timezone.now() - cutoff

    group_list = Group.objects.filter(
        project__in=project_list,
        status=STATUS_RESOLVED,
        resolved_at__gte=cutoff_dt,
    ).select_related('project').order_by('-score')[:limit]

    data = to_json(group_list, request)

    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def normalize_crumb(cls, crumb):
    """Normalize a raw breadcrumb dict into the canonical internal form.

    Optional fields (level, message, category, event_id, data) are only
    emitted when present; string fields are trimmed and non-string data
    values are JSON-encoded.

    Raises:
        InterfaceValidationError: if no usable timestamp is present.
    """
    ty = crumb.get('type') or 'default'
    ts = parse_timestamp(crumb.get('timestamp'))
    if ts is None:
        raise InterfaceValidationError('Unable to determine timestamp '
                                       'for crumb')

    rv = {
        'type': ty,
        'timestamp': to_timestamp(ts),
    }

    # 'info' is the implicit default level, so don't store it.
    level = crumb.get('level')
    if level not in (None, 'info'):
        rv['level'] = level

    msg = crumb.get('message')
    if msg is not None:
        rv['message'] = trim(six.text_type(msg), 4096)

    category = crumb.get('category')
    if category is not None:
        rv['category'] = trim(six.text_type(category), 256)

    event_id = crumb.get('event_id')
    if event_id is not None:
        rv['event_id'] = event_id

    if crumb.get('data'):
        # BUG FIX: build a normalized copy instead of mutating the caller's
        # dict in place while iterating it.
        data = {}
        for key, value in six.iteritems(crumb['data']):
            if isinstance(value, six.string_types):
                data[key] = value
            else:
                data[key] = json.dumps(value)
        rv['data'] = trim(data, 4096)

    return rv
def poll(request, project):
    """Return the first page of the project's group list as JSON,
    optionally filtered by a saved view (``view_id`` query param)."""
    # Imported here, presumably to avoid a circular import at module load.
    from sentry.templatetags.sentry_plugins import handle_before_events

    offset = 0
    limit = settings.MESSAGES_PER_PAGE

    view_id = request.GET.get("view_id")
    if view_id:
        try:
            view = View.objects.get_from_cache(pk=view_id)
        except View.DoesNotExist:
            return HttpResponseBadRequest()
    else:
        view = None

    response = _get_group_list(request=request, project=project, view=view)

    event_list = response["event_list"]
    event_list = list(event_list[offset:limit])

    # Give plugins a chance to annotate events before rendering.
    handle_before_events(request, event_list)

    data = transform_groups(request, event_list)

    response = HttpResponse(json.dumps(data))
    response["Content-Type"] = "application/json"
    return response
def bookmark(request, project):
    """Toggle the authenticated user's bookmark on a group.

    Responds with ``{"bookmarked": true/false}`` reflecting the new
    state; 403 on any missing/unauthorized input.
    """
    gid = request.REQUEST.get('gid')
    if not gid:
        return HttpResponseForbidden()

    if not request.user.is_authenticated():
        return HttpResponseForbidden()

    try:
        group = Group.objects.get(pk=gid)
    except Group.DoesNotExist:
        return HttpResponseForbidden()

    # Permission check: the user must have access to the group's project.
    if group.project and group.project.pk not in get_project_list(request.user):
        return HttpResponseForbidden()

    gb, created = GroupBookmark.objects.get_or_create(
        project=group.project,
        user=request.user,
        group=group,
    )
    # Toggle semantics: a second request removes the existing bookmark.
    if not created:
        gb.delete()

    response = HttpResponse(json.dumps({'bookmarked': created}))
    response['Content-Type'] = 'application/json'
    return response
def get_stats(request, team=None, project=None):
    """Return event and resolution counts over the trailing window.

    Window length comes from the ``minutes`` query param (default 15);
    scope is the single project when given without a team, otherwise
    all projects the user can see (optionally narrowed to *team*).
    """
    minutes = int(request.REQUEST.get("minutes", 15))

    if not team and project:
        project_list = [project]
    else:
        project_list = Project.objects.get_for_user(request.user, team=team)

    cutoff = datetime.timedelta(minutes=minutes)
    cutoff_dt = timezone.now() - cutoff

    num_events = (
        ProjectCountByMinute.objects.filter(project__in=project_list, date__gte=cutoff_dt).aggregate(
            t=Sum("times_seen")
        )["t"]
        or 0
    )

    # XXX: This is too slow if large amounts of groups are resolved
    num_resolved = (
        Group.objects.filter(project__in=project_list, status=STATUS_RESOLVED, resolved_at__gte=cutoff_dt).aggregate(
            t=Sum("times_seen")
        )["t"]
        or 0
    )

    data = {"events": num_events, "resolved": num_resolved}

    response = HttpResponse(json.dumps(data))
    response["Content-Type"] = "application/json"
    return response
def post(self, request, plugin_id, project_id, signature):
    """Entry point for incoming release webhooks.

    Verifies the request signature against the project's release token,
    then dispatches either to the builtin handler or to the named
    plugin's release hook. 403 on verification/disabled plugin, 400 on
    hook validation errors, 204 on success.
    """
    project = Project.objects.get_from_cache(id=project_id)

    token = ProjectOption.objects.get_value(project, 'sentry:release-token')

    logger.info('Incoming webhook for project_id=%s, plugin_id=%s', project_id, plugin_id)

    if not self.verify(plugin_id, project_id, token, signature):
        logger.warn('Unable to verify signature for release hook')
        return HttpResponse(status=403)

    if plugin_id == 'builtin':
        return self._handle_builtin(request, project)

    plugin = plugins.get(plugin_id)
    if not plugin.is_enabled(project):
        logger.warn('Disabled release hook received for project_id=%s, plugin_id=%s', project_id, plugin_id)
        return HttpResponse(status=403)

    cls = plugin.get_release_hook()
    hook = cls(project)
    try:
        hook.handle(request)
    except HookValidationError as exc:
        return HttpResponse(
            status=400,
            content=json.dumps({'error': six.text_type(exc)}),
            content_type='application/json',
        )

    return HttpResponse(status=204)
def test_valid_params(self):
    """Posting a minimal valid payload creates the release (201) and
    echoes the version back."""
    payload = json.dumps({'version': 'a'})
    resp = self.client.post(
        self.path,
        data=payload,
        content_type='application/json',
    )
    assert resp.status_code == 201, resp.content

    data = json.loads(resp.content)
    assert data['version'] == 'a'
def get_react_config(context):
    """Build the bootstrap JSON blob consumed by the React frontend.

    Resolves the current user from the template context (unwrapping
    Django's lazy user object), gathers enabled feature flags, and
    returns the serialized config marked safe for template embedding.
    """
    if 'request' in context:
        user = context['request'].user
    else:
        user = None

    if user:
        # request.user is a lazy object; resolve it before feature checks.
        user = extract_lazy_object(user)

    enabled_features = []
    if features.has('organizations:create', actor=user):
        enabled_features.append('organizations:create')
    if features.has('auth:register', actor=user):
        enabled_features.append('auth:register')

    context = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'urlPrefix': settings.SENTRY_URL_PREFIX,
        'version': _get_version_info(),
        'features': enabled_features,
        'mediaUrl': reverse('sentry-media', args=['sentry', '']),
    }
    if user and user.is_authenticated():
        context.update({
            'isAuthenticated': True,
            'user': serialize(user, user),
        })
    else:
        context.update({
            'isAuthenticated': False,
            'user': None,
        })

    return mark_safe(json.dumps(context))
def to_python(cls, data, has_system_frames=None):
    """Build a SingleException interface instance from raw ``data``.

    Accepts either explicit ``type``/``value`` fields or a combined
    "Type: value" string in ``value``; non-string values are
    JSON-encoded and everything is trimmed to size limits.

    Raises:
        InterfaceValidationError: if neither type nor value is present.
    """
    if not (data.get('type') or data.get('value')):
        raise InterfaceValidationError("No 'type' or 'value' present")

    if data.get('stacktrace') and data['stacktrace'].get('frames'):
        stacktrace = Stacktrace.to_python(
            data['stacktrace'],
            has_system_frames=has_system_frames,
        )
    else:
        stacktrace = None

    type = data.get('type')
    value = data.get('value')
    # Only try to split a combined "Type: value" string when value really
    # is a string -- a non-string (e.g. dict) value would crash on .split().
    if not type and isinstance(value, six.string_types) \
            and ':' in value.split(' ', 1)[0]:
        type, value = value.split(':', 1)
        # in case of TypeError: foo (no space)
        value = value.strip()

    # BUG FIX: ``basestring`` only exists on Python 2; the rest of the
    # codebase uses six.string_types for this check.
    if value is not None and not isinstance(value, six.string_types):
        value = json.dumps(value)

    value = trim(value, 4096)

    kwargs = {
        'type': trim(type, 128),
        'value': value,
        'module': trim(data.get('module'), 128),
        'stacktrace': stacktrace,
    }

    return cls(**kwargs)
def resolve(request, project):
    """AJAX endpoint: mark a group resolved and return its re-rendered row.

    NOTE(review): unlike the sibling resolve view, this variant performs
    no project-membership check on request.user -- confirm access control
    is enforced upstream (e.g. by a decorator on this view).
    """
    gid = request.REQUEST.get("gid")
    if not gid:
        return HttpResponseForbidden()

    try:
        group = Group.objects.get(pk=gid)
    except Group.DoesNotExist:
        return HttpResponseForbidden()

    # status=1 == resolved; update both the DB row and the local instance.
    Group.objects.filter(pk=group.pk).update(status=1)
    group.status = 1

    data = [
        (
            m.pk,
            {
                "html": render_to_string("sentry/partial/_group.html", {"group": m, "request": request}).strip(),
                "count": m.times_seen,
            },
        )
        for m in [group]
    ]

    response = HttpResponse(json.dumps(data))
    response["Content-Type"] = "application/json"
    return response
def get_resolved_groups(request, team=None, project=None):
    """Return groups resolved within the trailing window as JSON.

    Scope is the single project when given without a team, otherwise all
    projects the user can see (optionally narrowed to *team*). ``limit``
    is capped at 100.
    """
    minutes = int(request.REQUEST.get('minutes', 15))
    limit = min(100, int(request.REQUEST.get('limit', 10)))

    if not team and project:
        project_list = [project]
    else:
        project_list = Project.objects.get_for_user(request.user, team=team)
    project_dict = dict((p.id, p) for p in project_list)

    cutoff = timedelta(minutes=minutes)
    cutoff_dt = timezone.now() - cutoff

    group_list = list(Group.objects.filter(
        project__in=project_list,
        status=STATUS_RESOLVED,
        resolved_at__gte=cutoff_dt,
    ).order_by('-score')[:limit])

    # Pre-populate each group's project to avoid per-row lookups downstream.
    for group in group_list:
        group._project_cache = project_dict.get(group.project_id)

    data = to_json(group_list, request)

    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def resolve(request):
    """Mark a GroupedMessage resolved; redirect for normal requests,
    return the re-rendered row as JSON for AJAX requests."""
    gid = request.REQUEST.get('gid')
    if not gid:
        return HttpResponseForbidden()

    try:
        group = GroupedMessage.objects.get(pk=gid)
    except GroupedMessage.DoesNotExist:
        return HttpResponseForbidden()

    # status=1 == resolved; score/times_seen are reset alongside it.
    GroupedMessage.objects.filter(pk=group.pk).update(
        status=1,
        score=1,
        times_seen=1,
    )

    # Non-AJAX callers go back where they came from (or the dashboard).
    if not request.is_ajax():
        return HttpResponseRedirect(request.META.get('HTTP_REFERER') or reverse('sentry'))

    data = [
        (m.pk, {
            'html': render_to_string('sentry/partial/_group.html', {
                'group': m,
                'request': request,
            }).strip(),
            'count': m.times_seen,
        }) for m in [group]]

    response = HttpResponse(json.dumps(data))
    response['Content-Type'] = 'application/json'
    return response
def _handle_builtin(self, request, project):
    """Proxy a builtin release webhook through the internal releases API,
    translating API errors back into plain HTTP responses.

    NOTE(review): ``json.loads(request.body)`` is not guarded -- malformed
    JSON would raise here rather than produce a 400; confirm callers
    handle that.
    """
    endpoint = '/projects/{}/{}/releases/'.format(
        project.organization.slug,
        project.slug,
    )

    try:
        # Ideally the API client would support some kind of god-mode here
        # as we've already confirmed credentials and simply want to execute
        # the view code. Instead we hack around it with an ApiKey instance
        god = ApiKey(
            organization=project.organization,
            scopes=getattr(ApiKey.scopes, 'project:write'),
        )

        resp = client.post(
            endpoint,
            data=json.loads(request.body),
            auth=god,
        )
    except client.ApiError as exc:
        return HttpResponse(
            status=exc.status_code,
            content=exc.body,
            content_type='application/json',
        )
    return HttpResponse(
        status=resp.status_code,
        content=json.dumps(resp.data),
        content_type='application/json',
    )
def test_get_task_kwargs_for_message_version_1_unexpected_operation():
    """A version-1 payload carrying an unknown operation must be rejected."""
    message = json.dumps([1, "invalid", {}, {}])
    with pytest.raises(UnexpectedOperation):
        get_task_kwargs_for_message(message)
def on_link_shared(self, request: Request, slack_request: SlackDMRequest) -> bool:
    """Returns true on success"""
    # Accumulate unfurlable URLs bucketed by link type; dedupe via a seen-set.
    matches: MutableMapping[LinkType, list[UnfurlableUrl]] = defaultdict(list)
    links_seen = set()

    data = slack_request.data.get("event", {})

    # An unfurl may have multiple links to unfurl
    for item in data.get("links", []):
        try:
            url = item["url"]
            slack_shared_link = parse_link(url)
        except Exception as e:
            logger.error("slack.parse-link-error", extra={"error": str(e)})
            continue

        # We would like to track what types of links users are sharing, but
        # it's a little difficult to do in Sentry because we filter requests
        # from Slack bots. Instead we just log to Kibana.
        logger.info("slack.link-shared", extra={"slack_shared_link": slack_shared_link})
        link_type, args = match_link(url)

        # Link can't be unfurled
        if link_type is None or args is None:
            continue

        organization = slack_request.integration.organizations.first()
        # Discover links require a linked identity: prompt the user to link
        # instead of unfurling, and stop processing this event entirely.
        if (organization
                and link_type == LinkType.DISCOVER
                and not slack_request.has_identity
                and features.has("organizations:discover-basic", organization, actor=request.user)):
            analytics.record(
                "integrations.slack.chart_unfurl",
                organization_id=organization.id,
                unfurls_count=0,
            )
            self.prompt_link(slack_request)
            return True

        # Don't unfurl the same thing multiple times
        seen_marker = hash(json.dumps((link_type, args), sort_keys=True))
        if seen_marker in links_seen:
            continue

        links_seen.add(seen_marker)
        matches[link_type].append(UnfurlableUrl(url=url, args=args))

    if not matches:
        return False

    # Unfurl each link type
    results: MutableMapping[str, Any] = {}
    for link_type, unfurl_data in matches.items():
        results.update(link_handlers[link_type].fn(
            request,
            slack_request.integration,
            unfurl_data,
            slack_request.user,
        ))

    if not results:
        return False

    access_token = self._get_access_token(slack_request.integration)

    payload = {
        "token": access_token,
        "channel": data["channel"],
        "ts": data["message_ts"],
        "unfurls": json.dumps(results),
    }

    client = SlackClient()
    try:
        client.post("/chat.unfurl", data=payload)
    except ApiError as e:
        # Delivery failure is logged but still reported as success upstream.
        logger.error("slack.event.unfurl-error", extra={"error": str(e)}, exc_info=True)

    return True
def setUp(self):
    """Create two groups/releases/environments and insert 4 hours of
    synthetic events (one every 10 minutes) directly into snuba."""
    super(SnubaTSDBTest, self).setUp()

    self.db = SnubaTSDB()
    # Anchor all timestamps at today's midnight UTC for stable bucketing.
    self.now = datetime.utcnow().replace(
        hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
    )
    self.proj1 = self.create_project()
    self.proj1env1 = self.create_environment(project=self.proj1, name="test")
    self.proj1env2 = self.create_environment(project=self.proj1, name="dev")
    self.proj1env3 = self.create_environment(project=self.proj1, name="staging")
    self.proj1defaultenv = self.create_environment(project=self.proj1, name="")

    self.proj1group1 = self.create_group(self.proj1)
    self.proj1group2 = self.create_group(self.proj1)

    hash1 = "1" * 32
    hash2 = "2" * 32

    GroupHash.objects.create(project=self.proj1, group=self.proj1group1, hash=hash1)
    GroupHash.objects.create(project=self.proj1, group=self.proj1group2, hash=hash2)

    self.release1 = Release.objects.create(
        organization_id=self.organization.id, version="1" * 10, date_added=self.now
    )
    self.release1.add_project(self.proj1)
    self.release2 = Release.objects.create(
        organization_id=self.organization.id, version="2" * 10, date_added=self.now
    )
    self.release2.add_project(self.proj1)

    self.group1release1 = GroupRelease.objects.create(
        project_id=self.proj1.id, group_id=self.proj1group1.id, release_id=self.release1.id
    )
    self.group1release2 = GroupRelease.objects.create(
        project_id=self.proj1.id, group_id=self.proj1group1.id, release_id=self.release2.id
    )
    self.group2release1 = GroupRelease.objects.create(
        project_id=self.proj1.id, group_id=self.proj1group2.id, release_id=self.release1.id
    )

    # One synthetic event every 600s over 14400s (every 10 min for 4 hours),
    # alternating between the two groups/hashes.
    data = json.dumps(
        [
            (
                2,
                "insert",
                {
                    "event_id": (six.text_type(r) * 32)[:32],
                    "primary_hash": [hash1, hash2][(r // 600) % 2],  # Switch every 10 mins
                    "group_id": [self.proj1group1.id, self.proj1group2.id][(r // 600) % 2],
                    "project_id": self.proj1.id,
                    "message": "message 1",
                    "platform": "python",
                    "datetime": (self.now + timedelta(seconds=r)).strftime(
                        "%Y-%m-%dT%H:%M:%S.%fZ"
                    ),
                    "data": {
                        "type": "transaction" if r % 1200 == 0 else "error",
                        "received": calendar.timegm(self.now.timetuple()) + r,
                        "tags": {
                            "foo": "bar",
                            "baz": "quux",
                            # Switch every 2 hours
                            "environment": [self.proj1env1.name, None][(r // 7200) % 3],
                            "sentry:user": u"id:user{}".format(r // 3300),
                            "sentry:release": six.text_type(r // 3600) * 10,  # 1 per hour
                        },
                        "user": {
                            # change every 55 min so some hours have 1 user, some have 2
                            "id": u"user{}".format(r // 3300),
                            "email": u"user{}@sentry.io".format(r),
                        },
                    },
                },
            )
            for r in range(0, 14400, 600)
        ]
    )  # Every 10 min for 4 hours

    assert (
        requests.post(settings.SENTRY_SNUBA + "/tests/events/insert", data=data).status_code
        == 200
    )

    # snuba trims query windows based on first_seen/last_seen, so these need to be correct-ish
    self.proj1group1.first_seen = self.now
    self.proj1group1.last_seen = self.now + timedelta(seconds=14400)
    self.proj1group1.save()
    self.proj1group2.first_seen = self.now
    self.proj1group2.last_seen = self.now + timedelta(seconds=14400)
    self.proj1group2.save()
def _makeMessage(self, data): return json.dumps(data).encode('utf-8')
def to_json(data):
    """Return *data* serialized as a JSON string."""
    serialized = json.dumps(data)
    return serialized
def build_attachment(group, event=None, tags=None, identity=None, actions=None, rules=None):
    """Build the Slack message-attachment dict for an issue notification.

    Includes resolve/ignore/assign actions (adjusted for the group's
    current status), selected tag fields, and a footer naming the
    triggering rule(s). When *actions* were already taken, the action
    buttons are dropped and their text is appended instead.
    """
    # XXX(dcramer): options are limited to 100 choices, even when nested
    status = group.get_status()

    assignees = get_assignees(group)
    logo_url = absolute_uri(
        get_asset_url('sentry', 'images/sentry-email-avatar.png'))
    color = NEW_ISSUE_COLOR
    text = build_attachment_text(group, event) or ''

    if actions is None:
        actions = []

    try:
        assignee = GroupAssignee.objects.get(group=group).user
        assignee = {
            'text': assignee.get_display_name(),
            'value': assignee.username,
        }

        # Add unassign option to the top of the list
        assignees.insert(0, UNASSIGN_OPTION)
    except GroupAssignee.DoesNotExist:
        assignee = None

    resolve_button = {
        'name': 'resolve_dialog',
        'value': 'resolve_dialog',
        'type': 'button',
        'text': 'Resolve...',
    }

    ignore_button = {
        'name': 'status',
        'value': 'ignored',
        'type': 'button',
        'text': 'Ignore',
    }

    # Flip the buttons when the group is already resolved/ignored.
    if status == GroupStatus.RESOLVED:
        resolve_button.update({
            'name': 'status',
            'text': 'Unresolve',
            'value': 'unresolved',
        })

    if status == GroupStatus.IGNORED:
        ignore_button.update({
            'text': 'Stop Ignoring',
            'value': 'unresolved',
        })

    payload_actions = [
        resolve_button,
        ignore_button,
        {
            'name': 'assign',
            'text': 'Select Assignee...',
            'type': 'select',
            'options': assignees,
            'selected_options': [assignee],
        },
    ]

    fields = []

    if tags:
        event_tags = event.tags if event else group.get_latest_event().tags
        for tag_key, tag_value in event_tags:
            if tag_key in tags:
                fields.append({
                    'title': tag_key.encode('utf-8'),
                    'value': tag_value.encode('utf-8'),
                    'short': True,
                })

    if actions:
        # Actions were already taken: show what happened instead of buttons.
        action_texts = filter(
            None, [build_action_text(identity, a) for a in actions])
        text += '\n' + '\n'.join(action_texts)

        color = ACTIONED_ISSUE_COLOR
        payload_actions = []

    ts = group.last_seen

    if event:
        event_ts = event.datetime
        ts = max(ts, event_ts)

    footer = u'{}'.format(group.qualified_short_id)

    if rules:
        footer += u' via {}'.format(rules[0].label)

        if len(rules) > 1:
            footer += u' (+{} other)'.format(len(rules) - 1)

    return {
        'fallback': u'[{}] {}'.format(group.project.slug, group.title),
        'title': build_attachment_title(group, event),
        'title_link': add_notification_referrer_param(group.get_absolute_url(), 'slack'),
        'text': text,
        'fields': fields,
        'mrkdwn_in': ['text'],
        'callback_id': json.dumps({'issue': group.id}),
        'footer_icon': logo_url,
        'footer': footer,
        'ts': to_timestamp(ts),
        'color': color,
        'actions': payload_actions,
    }
def get(self, request, **kwargs):
    """Serve the event JSON schema as an application/json response."""
    body = json.dumps(schemas.EVENT_SCHEMA)
    return HttpResponse(body, content_type='application/json')
def dispatch(self, request, project_id=None, *args, **kwargs):
    """Top-level entry point for client API requests.

    Wraps ``_dispatch`` with: optional raw-event publishing to Kafka, API
    error handling (translated into JSON error responses), catch-all 500
    handling, request/response metrics, and CORS headers.
    """
    helper = ClientApiHelper(
        agent=request.META.get('HTTP_USER_AGENT'),
        project_id=project_id,
        ip_address=request.META['REMOTE_ADDR'],
    )
    origin = None
    # Best-effort raw event mirroring; only when a publisher is configured.
    if kafka_publisher is not None:
        self._publish_to_kafka(request)
    try:
        origin = self.auth_helper_cls.origin_from_request(request)
        response = self._dispatch(request, helper, project_id=project_id,
                                  origin=origin, *args, **kwargs)
    except APIError as e:
        context = {
            'error': force_bytes(e.msg, errors='replace'),
        }
        if e.name:
            context['error_name'] = e.name
        response = HttpResponse(json.dumps(context),
                                content_type='application/json',
                                status=e.http_status)
        # Set X-Sentry-Error as in many cases it is easier to inspect the headers
        response['X-Sentry-Error'] = context['error']
        if isinstance(e, APIRateLimited) and e.retry_after is not None:
            response['Retry-After'] = six.text_type(
                int(math.ceil(e.retry_after)))
    except Exception as e:
        # TODO(dcramer): test failures are not outputting the log message
        # here
        if settings.DEBUG:
            # Only expose the traceback to the client in debug mode.
            content = traceback.format_exc()
        else:
            content = ''
        logger.exception(e)
        response = HttpResponse(content, content_type='text/plain', status=500)
    # TODO(dcramer): it'd be nice if we had an incr_multi method so
    # tsdb could optimize this
    metrics.incr('client-api.all-versions.requests', skip_internal=False)
    metrics.incr('client-api.all-versions.responses.%s' % (response.status_code, ),
                 skip_internal=False)
    metrics.incr(
        'client-api.all-versions.responses.%sxx' % (six.text_type(response.status_code)[0], ),
        skip_internal=False,
    )
    # Per-protocol-version counters, only once the helper has parsed a version.
    if helper.context.version:
        metrics.incr(
            'client-api.v%s.requests' % (helper.context.version, ),
            skip_internal=False,
        )
        metrics.incr(
            'client-api.v%s.responses.%s' % (helper.context.version, response.status_code),
            skip_internal=False,
        )
        metrics.incr(
            'client-api.v%s.responses.%sxx' % (helper.context.version, six.text_type(
                response.status_code)[0]),
            skip_internal=False,
        )
    if response.status_code != 200 and origin:
        # We allow all origins on errors
        response['Access-Control-Allow-Origin'] = '*'
    if origin:
        response['Access-Control-Allow-Headers'] = \
            'X-Sentry-Auth, X-Requested-With, Origin, Accept, ' \
            'Content-Type, Authentication'
        response['Access-Control-Allow-Methods'] = \
            ', '.join(self._allowed_methods())
        response['Access-Control-Expose-Headers'] = \
            'X-Sentry-Error, Retry-After'
    return response
def test_get_task_kwargs_for_message_version_1_skip_consume(): assert (get_task_kwargs_for_message( json.dumps([1, "insert", {}, { "skip_consume": True }])) is None)
def respond(self, *args, **kwargs): return HttpResponse(json.dumps(self.object), mimetype="application/json")
def jsonify(value): return to_unicode(value) if isinstance(value, six.string_types) else json.dumps(value)
def _makeMessage(self, data): return base64.b64encode(json.dumps(data))
def get_prep_value(self, value): if value is None: if not self.null and self.blank: return "" return None return json.dumps(value, default=default, **self.encoder_kwargs)
def gen_aws_client(account_number, region, aws_external_id, service_name="lambda"): """ account_number - acccount number in AWS regon - region in AWS aws_external_id - the external_id used to assume the role Returns an aws_lambda_client """ role_arn = "arn:aws:iam::%s:role/SentryRole" % (account_number) aws_access_key_id = options.get("aws-lambda.access-key-id") aws_secret_access_key = options.get("aws-lambda.secret-access-key") # throw a configuration error if we don't have keys if not aws_access_key_id or not aws_secret_access_key: raise ConfigurationError( "AWS access key ID or secret access key not set") client = boto3.client( service_name="sts", aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=options.get("aws-lambda.host-region"), ) # need policy statements for cross account access assumed_role_object = client.assume_role( RoleSessionName="Sentry", RoleArn=role_arn, ExternalId=aws_external_id, Policy=json.dumps({ "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Action": [ "lambda:UpdateFunctionConfiguration", "lambda:GetFunction" ], "Resource": f"arn:aws:lambda:{region}:{account_number}:function:*", }, { "Effect": "Allow", "Action": [ "lambda:ListFunctions", "lambda:GetLayerVersion", "organizations:DescribeAccount", ], "Resource": "*", }, ], }), ) credentials = assumed_role_object["Credentials"] boto3_session = boto3.Session( aws_access_key_id=credentials["AccessKeyId"], aws_secret_access_key=credentials["SecretAccessKey"], aws_session_token=credentials["SessionToken"], ) return boto3_session.client(service_name=service_name, region_name=region)
def notify(self, notification):
    """Post a Slack webhook message for the given notification.

    Builds an attachment (culprit/project/custom-message/rules/tags fields,
    depending on plugin options) and POSTs it to the configured webhook.
    Returns None when the plugin is unconfigured for the project.
    """
    event = notification.event
    group = event.group
    project = group.project
    if not self.is_configured(project):
        return
    webhook = self.get_option("webhook", project)
    username = (self.get_option("username", project) or "Sentry").strip()
    icon_url = self.get_option("icon_url", project)
    channel = (self.get_option("channel", project) or "").strip()
    # Values are UTF-8 encoded byte strings (Python 2-era plugin code).
    title = event.title.encode("utf-8")
    # TODO(dcramer): we'd like this to be the event culprit, but Sentry
    # does not currently retain it
    if group.culprit:
        culprit = group.culprit.encode("utf-8")
    else:
        culprit = None
    project_name = project.get_full_name().encode("utf-8")
    fields = []
    # They can be the same if there is no culprit
    # So we set culprit to an empty string instead of duplicating the text
    if not self.get_option("exclude_culprit", project) and culprit and title != culprit:
        fields.append({
            "title": "Culprit",
            "value": culprit,
            "short": False
        })
    if not self.get_option("exclude_project", project):
        fields.append({
            "title": "Project",
            "value": project_name,
            "short": True
        })
    if self.get_option("custom_message", project):
        fields.append({
            "title": "Custom message",
            "value": self.get_option("custom_message", project),
            "short": False,
        })
    if self.get_option("include_rules", project):
        rules = []
        for rule in notification.rules:
            rule_link = "/%s/%s/settings/alerts/rules/%s/" % (
                group.organization.slug,
                project.slug,
                rule.id,
            )
            # Make sure it's an absolute uri since we're sending this
            # outside of Sentry into Slack
            rule_link = absolute_uri(rule_link)
            rules.append((rule_link, rule.label))
        if rules:
            value = u", ".join(u"<{} | {}>".format(*r) for r in rules)
            fields.append({
                "title": "Triggered By",
                "value": value.encode("utf-8"),
                "short": False
            })
    if self.get_option("include_tags", project):
        # Tag keys may be filtered by include/exclude lists; both the raw
        # key and its standardized form are matched.
        included_tags = set(
            self.get_tag_list("included_tag_keys", project) or [])
        excluded_tags = set(
            self.get_tag_list("excluded_tag_keys", project) or [])
        for tag_key, tag_value in self._get_tags(event):
            key = tag_key.lower()
            std_key = tagstore.get_standardized_key(key)
            if included_tags and key not in included_tags and std_key not in included_tags:
                continue
            if excluded_tags and (key in excluded_tags or std_key in excluded_tags):
                continue
            fields.append({
                "title": tag_key.encode("utf-8"),
                "value": tag_value.encode("utf-8"),
                "short": True,
            })
    payload = {
        "attachments": [{
            "fallback": "[%s] %s" % (project_name, title),
            "title": title,
            "title_link": group.get_absolute_url(params={"referrer": "slack"}),
            "color": self.color_for_event(event),
            "fields": fields,
        }]
    }
    if username:
        payload["username"] = username.encode("utf-8")
    if channel:
        payload["channel"] = channel
    if icon_url:
        payload["icon_url"] = icon_url
    values = {"payload": json.dumps(payload)}
    # Apparently we've stored some bad data from before we used `URLField`.
    webhook = webhook.strip(" ")
    return http.safe_urlopen(webhook, method="POST", data=values, timeout=5)
def send_notification_as_slack( notification: BaseNotification, recipients: Union[Set[User], Set[Team]], shared_context: Mapping[str, Any], extra_context_by_user_id: Optional[Mapping[int, Mapping[str, Any]]], ) -> None: """Send an "activity" or "alert rule" notification to a Slack user or team.""" client = SlackClient() data = get_channel_and_token_by_recipient(notification.organization, recipients) for recipient, tokens_by_channel in data.items(): is_multiple = True if len([token for token in tokens_by_channel ]) > 1 else False if is_multiple: logger.info( "notification.multiple.slack_post", extra={ "notification": notification, "recipient": recipient.id, }, ) extra_context = (extra_context_by_user_id or {}).get(recipient.id, {}) context = get_context(notification, recipient, shared_context, extra_context) attachment = [ build_notification_attachment(notification, context, recipient) ] for channel, token in tokens_by_channel.items(): # unfurl_links and unfurl_media are needed to preserve the intended message format # and prevent the app from replying with help text to the unfurl payload = { "token": token, "channel": channel, "link_names": 1, "unfurl_links": False, "unfurl_media": False, "text": notification.get_notification_title(), "attachments": json.dumps(attachment), } try: client.post("/chat.postMessage", data=payload, timeout=5) except ApiError as e: logger.info( "notification.fail.slack_post", extra={ "error": str(e), "notification": notification, "recipient": recipient.id, "channel_id": channel, "is_multiple": is_multiple, }, ) analytics.record( "integrations.slack.notification_sent", organization_id=notification.organization.id, project_id=notification.project.id, category=notification.get_category(), actor_id=recipient.actor_id, ) key = get_key(notification) metrics.incr( f"{key}.notifications.sent", instance=f"slack.{key}.notification", skip_internal=False, )
def build_mock_message(self, data, topic=None): message = Mock() message.value.return_value = json.dumps(data) if topic: message.topic.return_value = topic return message
def notify(self, notification):
    """Post a Slack webhook message for the given notification (legacy plugin).

    Always includes Culprit (when distinct) and Project fields, plus
    triggering rules and event tags when the matching options are enabled.
    Returns None when the plugin is unconfigured for the project.
    """
    event = notification.event
    group = event.group
    project = group.project
    if not self.is_configured(project):
        return
    webhook = self.get_option('webhook', project)
    username = (self.get_option('username', project) or 'Sentry').strip()
    icon_url = self.get_option('icon_url', project)
    channel = (self.get_option('channel', project) or '').strip()
    # Values are UTF-8 encoded byte strings (Python 2-era plugin code).
    title = event.message_short.encode('utf-8')
    # TODO(dcramer): we'd like this to be the event culprit, but Sentry
    # does not currently retain it
    if group.culprit:
        culprit = group.culprit.encode('utf-8')
    else:
        culprit = None
    project_name = get_project_full_name(project).encode('utf-8')
    fields = []
    # They can be the same if there is no culprit
    # So we set culprit to an empty string instead of duplicating the text
    if culprit and title != culprit:
        fields.append({
            'title': 'Culprit',
            'value': culprit,
            'short': False,
        })
    fields.append({
        'title': 'Project',
        'value': project_name,
        'short': True,
    })
    if self.get_option('include_rules', project):
        rules = []
        for rule in notification.rules:
            rule_link = reverse(
                'sentry-edit-project-rule',
                args=[group.organization.slug, project.slug, rule.id])
            # Make sure it's an absolute uri since we're sending this
            # outside of Sentry into Slack
            rule_link = absolute_uri(rule_link)
            rules.append((rule_link, rule.label.encode('utf-8')))
        if rules:
            fields.append({
                'title': 'Triggered By',
                'value': ', '.join('<%s | %s>' % r for r in rules),
                'short': False,
            })
    if self.get_option('include_tags', project):
        # Tag keys may be filtered by include/exclude lists; both the raw
        # key and its standardized form are matched.
        included_tags = set(
            self.get_tag_list('included_tag_keys', project) or [])
        excluded_tags = set(
            self.get_tag_list('excluded_tag_keys', project) or [])
        for tag_key, tag_value in self._get_tags(event):
            key = tag_key.lower()
            std_key = TagKey.get_standardized_key(key)
            if included_tags and key not in included_tags and std_key not in included_tags:
                continue
            if excluded_tags and (key in excluded_tags or std_key in excluded_tags):
                continue
            fields.append({
                'title': tag_key.encode('utf-8'),
                'value': tag_value.encode('utf-8'),
                'short': True,
            })
    payload = {
        'parse': 'none',
        'attachments': [{
            'fallback': '[%s] %s' % (project_name, title),
            'title': title,
            'title_link': group.get_absolute_url(),
            'color': self.color_for_event(event),
            'fields': fields,
        }]
    }
    if username:
        payload['username'] = username.encode('utf-8')
    if channel:
        payload['channel'] = channel
    if icon_url:
        payload['icon_url'] = icon_url
    values = {'payload': json.dumps(payload)}
    # Apparently we've stored some bad data from before we used `URLField`.
    webhook = webhook.strip(' ')
    return http.safe_urlopen(webhook, method='POST', data=values)
def snuba_search(
    self,
    start,
    end,
    project_ids,
    environment_ids,
    sort_field,
    cursor=None,
    group_ids=None,
    limit=None,
    offset=0,
    get_sample=False,
    search_filters=None,
):
    """
    Returns a tuple of:
    * a sorted list of (group_id, group_score) tuples sorted descending by score,
    * the count of total results (rows) available for this query.
    """
    # Base filter keys; environment/group filters are optional narrowing.
    filters = {"project_id": project_ids}

    if environment_ids is not None:
        filters["environment"] = environment_ids

    if group_ids:
        filters["group_id"] = sorted(group_ids)

    conditions = []
    having = []
    for search_filter in search_filters:
        if (
                # Don't filter on postgres fields here, they're not available
                search_filter.key.name in self.postgres_only_fields or
                # We special case date
                search_filter.key.name == "date"):
            continue
        converted_filter = convert_search_filter_to_snuba_query(
            search_filter)
        converted_filter = self._transform_converted_filter(
            search_filter, converted_filter, project_ids, environment_ids)
        if converted_filter is not None:
            # Ensure that no user-generated tags that clashes with aggregation_defs is added to having
            if search_filter.key.name in self.aggregation_defs and not search_filter.key.is_tag:
                having.append(converted_filter)
            else:
                conditions.append(converted_filter)

    # The sort field plus anything it depends on must be computed, along
    # with every aggregation referenced by a HAVING clause.
    extra_aggregations = self.dependency_aggregations.get(sort_field, [])
    required_aggregations = set([sort_field, "total"] + extra_aggregations)
    for h in having:
        alias = h[0]
        required_aggregations.add(alias)

    aggregations = []
    for alias in required_aggregations:
        aggregations.append(self.aggregation_defs[alias] + [alias])

    if cursor is not None:
        # Paginate by constraining the sort value relative to the cursor.
        having.append(
            (sort_field, ">=" if cursor.is_prev else "<=", cursor.value))

    selected_columns = []
    if get_sample:
        # Deterministic pseudo-random ordering: hash each group_id with a
        # salt derived from the query so the sample is stable per query.
        query_hash = md5(
            json.dumps(conditions).encode("utf-8")).hexdigest()[:8]
        selected_columns.append(("cityHash64", ("'{}'".format(query_hash),
                                                "group_id"), "sample"))
        sort_field = "sample"
        orderby = [sort_field]
        referrer = "search_sample"
    else:
        # Get the top matching groups by score, i.e. the actual search results
        # in the order that we want them.
        orderby = [
            "-{}".format(sort_field),
            "group_id",
        ]  # ensure stable sort within the same score
        referrer = "search"

    snuba_results = snuba.aliased_query(
        dataset=self.dataset,
        start=start,
        end=end,
        selected_columns=selected_columns,
        groupby=["group_id"],
        conditions=conditions,
        having=having,
        filter_keys=filters,
        aggregations=aggregations,
        orderby=orderby,
        referrer=referrer,
        limit=limit,
        offset=offset,
        totals=True,  # Needs to have totals_mode=after_having_exclusive so we get groups matching HAVING only
        turbo=get_sample,  # Turn off FINAL when in sampling mode
        sample=1,  # Don't use clickhouse sampling, even when in turbo mode.
        condition_resolver=snuba.get_snuba_column_name,
    )
    rows = snuba_results["data"]
    total = snuba_results["totals"]["total"]

    if not get_sample:
        metrics.timing("snuba.search.num_result_groups", len(rows))

    return [(row["group_id"], row[sort_field]) for row in rows], total
def run_test(self, message): self.consumer.parse_message_value(json.dumps(message))
def handle_user_report(self, payload, project, **kwargs): metrics.incr("mail_adapter.handle_user_report") group = Group.objects.get(id=payload["report"]["issue"]["id"]) participants = GroupSubscription.objects.get_participants( group=group).get(ExternalProviders.EMAIL) if not participants: return org = group.organization enhanced_privacy = org.flags.enhanced_privacy context = { "project": project, "project_link": absolute_uri(f"/{project.organization.slug}/{project.slug}/"), "issue_link": absolute_uri("/{}/{}/issues/{}/".format( project.organization.slug, project.slug, payload["report"]["issue"]["id"])), # TODO(dcramer): we dont have permalinks to feedback yet "link": absolute_uri("/{}/{}/issues/{}/feedback/".format( project.organization.slug, project.slug, payload["report"]["issue"]["id"])), "group": group, "report": payload["report"], "enhanced_privacy": enhanced_privacy, } subject_prefix = self._build_subject_prefix(project) subject = force_text("{}{} - New Feedback from {}".format( subject_prefix, group.qualified_short_id, payload["report"]["name"])) headers = { "X-Sentry-Project": project.slug, "X-SMTPAPI": json.dumps({"category": "user_report_email"}), } # TODO(dcramer): this is copypasta'd from activity notifications # and while it'd be nice to re-use all of that, they are currently # coupled to <Activity> instances which makes this tough for user, reason in participants.items(): context.update({ "reason": GroupSubscriptionReason.descriptions.get( reason, "are subscribed to this issue"), "unsubscribe_link": generate_signed_link( user.id, "sentry-account-email-unsubscribe-issue", kwargs={"issue_id": group.id}, ), }) msg = MessageBuilder( subject=subject, template="sentry/emails/activity/new-user-feedback.txt", html_template="sentry/emails/activity/new-user-feedback.html", headers=headers, type="notify.user-report", context=context, reference=group, ) msg.add_users([user.id], project=project) msg.send_async()
def to_json(obj, request=None): result = transform(obj, request=request) return json.dumps(result)
def create_or_edit_rule(request, organization, project, rule_id=None):
    """Render (and on POST, process) the create/edit form for an alert rule.

    When ``rule_id`` is given, edits that rule (redirecting to the rules
    list if it doesn't exist); otherwise creates a new Rule for the project.
    On a valid POST, saves the rule and redirects back to the rules list.
    """
    if rule_id:
        try:
            rule = Rule.objects.get(project=project, id=rule_id)
        except Rule.DoesNotExist:
            path = reverse('sentry-project-rules',
                           args=[organization.slug, project.slug])
            return HttpResponseRedirect(path)
    else:
        rule = Rule(project=project)

    form_data = {
        'label': rule.label,
        'action_match': rule.data.get('action_match'),
    }

    if request.POST:
        for key, value in request.POST.iteritems():
            form_data[key] = value
    else:
        # Flatten stored conditions/actions into the indexed form-field
        # naming scheme, e.g. condition[0][id].
        for num, node in enumerate(rule.data.get('conditions', [])):
            prefix = 'condition[%d]' % (num, )
            for key, value in node.iteritems():
                form_data[prefix + '[' + key + ']'] = value

        for num, node in enumerate(rule.data.get('actions', [])):
            prefix = 'action[%d]' % (num, )
            for key, value in node.iteritems():
                form_data[prefix + '[' + key + ']'] = value

    validator = RuleFormValidator(project, form_data)
    if request.POST and validator.is_valid():
        data = validator.cleaned_data.copy()
        rule.label = data.pop('label')
        rule.data = data
        rule.save()
        messages.add_message(request, messages.SUCCESS,
                             _('Changes to your rule were saved.'))
        path = reverse('sentry-project-rules',
                       args=[organization.slug, project.slug])
        return HttpResponseRedirect(path)

    action_list = []
    condition_list = []

    # TODO: conditions need to be based on actions
    for rule_type, rule in rules:
        node = rule(project)
        context = {
            'id': node.id,
            'label': node.label,
            'html': node.render_form(),
        }
        if rule_type.startswith('condition/'):
            condition_list.append(context)
        elif rule_type.startswith('action/'):
            action_list.append(context)

    context = csrf(request)
    context.update({
        'rule': rule,
        'form_is_valid': (not request.POST or validator.is_valid()),
        'form_errors': validator.errors,
        'form_data': form_data,
        'organization': organization,
        'team': project.team,
        'page': 'rules',
        'action_list': json.dumps(action_list),
        'condition_list': json.dumps(condition_list),
        'project': project,
    })

    return render_to_response('sentry/projects/rules/new.html', context, request)
def notify_digest( self, project: Project, digest: Any, target_type: ActionTargetType, target_identifier: Optional[int] = None, ) -> None: metrics.incr("mail_adapter.notify_digest") users = get_send_to(project, target_type, target_identifier).get(ExternalProviders.EMAIL) if not users: return user_ids = {user.id for user in users} logger.info( "mail.adapter.notify_digest", extra={ "project_id": project.id, "target_type": target_type.value, "target_identifier": target_identifier, "user_ids": user_ids, }, ) for user_id, digest in get_personalized_digests( target_type, project.id, digest, user_ids): start, end, counts = get_digest_metadata(digest) # If there is only one group in this digest (regardless of how many # rules it appears in), we should just render this using the single # notification template. If there is more than one record for a group, # just choose the most recent one. if len(counts) == 1: group = next(iter(counts)) record = max( itertools.chain.from_iterable( groups.get(group, []) for groups in digest.values()), key=lambda record: record.timestamp, ) notification = Notification(record.value.event, rules=record.value.rules) return self.notify(notification, target_type, target_identifier) context = { "start": start, "end": end, "project": project, "digest": digest, "counts": counts, } headers = { "X-Sentry-Project": project.slug, "X-SMTPAPI": json.dumps({"category": "digest_email"}), } group = next(iter(counts)) subject = self.get_digest_subject(group, counts, start) self.add_unsubscribe_link(context, user_id, project, "alert_digest") self._send_mail( subject=subject, template="sentry/emails/digests/body.txt", html_template="sentry/emails/digests/body.html", project=project, reference=project, headers=headers, type="notify.digest", context=context, send_to=[user_id], )
def size(self): return len(json.dumps(dict(self.data)))
def test_get_task_kwargs_for_message_invalid_version(): with pytest.raises(InvalidVersion): get_task_kwargs_for_message(json.dumps([0, "insert", {}]))
def notify(self, notification: Notification, raise_exception: bool = False) -> None:
    """Send this notification to the project's configured Slack webhook.

    :param notification: the Notification wrapping the triggering event.
    :param raise_exception: when True, re-raise webhook ApiErrors instead of
        swallowing the ignorable ones.
    """
    event = notification.event
    group = event.group
    project = group.project
    if not self.is_configured(project):
        return
    # NOTE(review): these are UTF-8 byte strings; on Python 3 they end up as
    # bytes values inside `payload`, which json.dumps cannot serialize —
    # confirm how get_client/request handles this.
    title = event.title.encode("utf-8")
    # TODO(dcramer): we'd like this to be the event culprit, but Sentry
    # does not currently retain it
    if group.culprit:
        culprit = group.culprit.encode("utf-8")
    else:
        culprit = None
    project_name = project.get_full_name().encode("utf-8")
    fields = []
    # They can be the same if there is no culprit
    # So we set culprit to an empty string instead of duplicating the text
    if not self.get_option("exclude_culprit", project) and culprit and title != culprit:
        fields.append({
            "title": "Culprit",
            "value": culprit,
            "short": False
        })
    if not self.get_option("exclude_project", project):
        fields.append({
            "title": "Project",
            "value": project_name,
            "short": True
        })
    if self.get_option("custom_message", project):
        fields.append({
            "title": "Custom message",
            "value": self.get_option("custom_message", project),
            "short": False,
        })
    if self.get_option("include_rules", project):
        rules = []
        for rule in notification.rules:
            rule_link = (
                f"/{group.organization.slug}/{project.slug}/settings/alerts/rules/{rule.id}/"
            )
            # Make sure it's an absolute uri since we're sending this
            # outside of Sentry into Slack
            rule_link = absolute_uri(rule_link)
            rules.append((rule_link, rule.label))
        if rules:
            value = ", ".join("<{} | {}>".format(*r) for r in rules)
            fields.append({
                "title": "Triggered By",
                "value": value.encode("utf-8"),
                "short": False
            })
    if self.get_option("include_tags", project):
        # Tag keys may be filtered by include/exclude lists; both the raw
        # key and its standardized form are matched.
        included_tags = set(
            self.get_tag_list("included_tag_keys", project) or [])
        excluded_tags = set(
            self.get_tag_list("excluded_tag_keys", project) or [])
        for tag_key, tag_value in self._get_tags(event):
            key = tag_key.lower()
            std_key = tagstore.get_standardized_key(key)
            if included_tags and key not in included_tags and std_key not in included_tags:
                continue
            if excluded_tags and (key in excluded_tags or std_key in excluded_tags):
                continue
            fields.append({
                "title": tag_key.encode("utf-8"),
                "value": tag_value.encode("utf-8"),
                "short": True,
            })
    payload = {
        "attachments": [{
            "fallback": b"[%s] %s" % (project_name, title),
            "title": title,
            "title_link": group.get_absolute_url(params={"referrer": "slack"}),
            "color": self.color_for_event(event),
            "fields": fields,
        }]
    }
    client = self.get_client(project)
    if client.username:
        payload["username"] = client.username.encode("utf-8")
    if client.channel:
        payload["channel"] = client.channel
    if client.icon_url:
        payload["icon_url"] = client.icon_url
    try:
        client.request({"payload": json.dumps(payload)})
    except ApiError as e:
        # Ignore 404 and ignorable errors from slack webhooks.
        if raise_exception or not (e.text in IGNORABLE_SLACK_ERRORS
                                   or e.code in IGNORABLE_SLACK_ERROR_CODES):
            raise e
def dispatch(self, request):
    """Serve the embeddable error-page feedback widget.

    Validates the eventId/key/origin query parameters, processes a POSTed
    UserReportForm (upserting on duplicate event_id), and otherwise renders
    the embed JavaScript with the form template and localized strings
    inlined as JSON.
    """
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._json_response(request, status=400)
    if not is_event_id(event_id):
        return self._json_response(request, status=400)
    key = self._get_project_key(request)
    if not key:
        return self._json_response(request, status=404)
    origin = self._get_origin(request)
    if not origin:
        return self._json_response(request, status=403)
    if not is_valid_origin(origin, key.project):
        return HttpResponse(status=403)
    if request.method == 'OPTIONS':
        return self._json_response(request)
    # TODO(dcramer): since we cant use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }
    form = UserReportForm(
        request.POST if request.method == 'POST' else None, initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id
        try:
            mapping = EventMapping.objects.get(
                event_id=report.event_id,
                project_id=key.project_id,
            )
        except EventMapping.DoesNotExist:
            # XXX(dcramer): the system should fill this in later
            pass
        else:
            report.group = Group.objects.get(id=mapping.group_id)
        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(
                project=report.project,
                event_id=report.event_id,
            ).update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
            )
        return self._json_response(request)
    elif request.method == 'POST':
        return self._json_response(request, {
            "errors": dict(form.errors),
        }, status=400)
    show_branding = ProjectOption.objects.get_value(
        project=key.project, key='feedback:branding', default='1') == '1'
    template = render_to_string('sentry/error-page-embed.html', {
        'form': form,
        'show_branding': show_branding,
    })
    # The '*/' ... ';/*' wrappers let the JSON be safely spliced into the
    # surrounding JavaScript template.
    context = {
        'endpoint':
        mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
        'template':
        mark_safe('*/' + json.dumps(template) + ';/*'),
        'strings':
        json.dumps_htmlsafe({
            'generic_error': six.text_type(GENERIC_ERROR),
            'form_error': six.text_type(FORM_ERROR),
            'sent_message': six.text_type(SENT_MESSAGE),
        }),
    }
    return render_to_response('sentry/error-page-embed.js', context,
                              request, content_type='text/javascript')
def build_attachment(group, event=None, tags=None, identity=None, actions=None, rules=None):
    """Build a Slack message attachment dict for an issue ``group``.

    Variant with member/team assignee option groups and level-based colors.

    :param group: the issue Group being rendered.
    :param event: optional Event; its level tag selects the color and its
        tags/timestamp are preferred when present.
    :param tags: optional collection of standardized tag keys to surface.
    :param identity: identity used when rendering action confirmation text.
    :param actions: actions already taken; when non-empty the interactive
        buttons are suppressed.
    :param rules: alert rules that triggered this notification (footer text).
    :returns: dict in the Slack attachment format.
    """
    # XXX(dcramer): options are limited to 100 choices, even when nested
    status = group.get_status()
    members = get_member_assignees(group)
    teams = get_team_assignees(group)
    logo_url = absolute_uri(
        get_asset_url('sentry', 'images/sentry-email-avatar.png'))
    color = LEVEL_TO_COLOR.get(event.get_tag('level'),
                               'error') if event else LEVEL_TO_COLOR['error']
    text = build_attachment_text(group, event) or ''
    if actions is None:
        actions = []
    assignee = get_assignee(group)
    resolve_button = {
        'name': 'resolve_dialog',
        'value': 'resolve_dialog',
        'type': 'button',
        'text': 'Resolve...',
    }
    ignore_button = {
        'name': 'status',
        'value': 'ignored',
        'type': 'button',
        'text': 'Ignore',
    }
    # Without releases the resolve dialog offers nothing extra, so use a
    # plain resolve button instead.
    has_releases = Release.objects.filter(
        projects=group.project,
        organization_id=group.project.organization_id).exists()
    if not has_releases:
        resolve_button.update({
            'name': 'status',
            'text': 'Resolve',
            'value': 'resolved',
        })
    # Flip the button labels/values when the issue is already in that state.
    if status == GroupStatus.RESOLVED:
        resolve_button.update({
            'name': 'status',
            'text': 'Unresolve',
            'value': 'unresolved',
        })
    if status == GroupStatus.IGNORED:
        ignore_button.update({
            'text': 'Stop Ignoring',
            'value': 'unresolved',
        })
    option_groups = []
    if teams:
        option_groups.append({
            'text': 'Teams',
            'options': teams,
        })
    if members:
        option_groups.append({
            'text': 'People',
            'options': members,
        })
    payload_actions = [
        resolve_button,
        ignore_button,
        {
            'name': 'assign',
            'text': 'Select Assignee...',
            'type': 'select',
            'selected_options': [assignee],
            'option_groups': option_groups,
        },
    ]
    fields = []
    if tags:
        # Fall back to the latest event's tags when no event was passed in.
        event_tags = event.tags if event else group.get_latest_event().tags
        for key, value in event_tags:
            std_key = tagstore.get_standardized_key(key)
            if std_key not in tags:
                continue
            labeled_value = tagstore.get_tag_value_label(key, value)
            fields.append({
                'title': std_key.encode('utf-8'),
                'value': labeled_value.encode('utf-8'),
                'short': True,
            })
    if actions:
        # An action was just taken: append its description and drop the
        # interactive buttons so the message reads as resolved/acknowledged.
        action_texts = filter(
            None, [build_action_text(group, identity, a) for a in actions])
        text += '\n' + '\n'.join(action_texts)
        color = ACTIONED_ISSUE_COLOR
        payload_actions = []
    ts = group.last_seen
    if event:
        event_ts = event.datetime
        ts = max(ts, event_ts)
    footer = u'{}'.format(group.qualified_short_id)
    if rules:
        footer += u' via {}'.format(rules[0].label)
        if len(rules) > 1:
            footer += u' (+{} other)'.format(len(rules) - 1)
    return {
        'fallback': u'[{}] {}'.format(group.project.slug, group.title),
        'title': build_attachment_title(group, event),
        'title_link': group.get_absolute_url(params={'referrer': 'slack'}),
        'text': text,
        'fields': fields,
        'mrkdwn_in': ['text'],
        'callback_id': json.dumps({'issue': group.id}),
        'footer_icon': logo_url,
        'footer': footer,
        'ts': to_timestamp(ts),
        'color': color,
        'actions': payload_actions,
    }
def test_get_task_kwargs_for_message_version_1_unsupported_operation(): assert get_task_kwargs_for_message(json.dumps([1, "delete", {}])) is None