def setUp(self):
    """Prepare ISO-formatted timestamps, relative to now, shared by the tests."""
    super().setUp()
    self.min_ago = iso_format(before_now(minutes=1))
    self.two_min_ago = iso_format(before_now(minutes=2))
    self.just_over_one_min_ago = iso_format(before_now(seconds=61))
def test_delete_old_entries(self):
    """As new events come in associated with new owners, old owners are deleted."""
    self.set_release_commits(self.user.email)
    # Repeated processing of the same event must not create duplicate owners.
    process_suspect_commits(self.event)
    process_suspect_commits(self.event)
    process_suspect_commits(self.event)
    assert GroupOwner.objects.filter(group=self.event.group).count() == 1
    assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()

    def _control_group_payload(message):
        # The two follow-up events are identical except for their message,
        # and share a fingerprint so they land in the same group.
        return {
            "message": message,
            "platform": "python",
            "timestamp": iso_format(before_now(seconds=1)),
            "stacktrace": {
                "frames": [
                    {
                        "function": "process_suspect_commits",
                        "abs_path": "/usr/src/sentry/src/sentry/tasks/groupowner.py",
                        "module": "sentry.tasks.groupowner",
                        "in_app": True,
                        "lineno": 48,
                        "filename": "sentry/tasks/groupowner.py",
                    },
                ]
            },
            "tags": {"sentry:release": self.release.version},
            "fingerprint": ["put-me-in-the-control-group"],
        }

    event_2 = self.store_event(
        data=_control_group_payload("BANG!"), project_id=self.project.id
    )
    event_3 = self.store_event(
        data=_control_group_payload("BOP!"), project_id=self.project.id
    )
    self.user_2 = self.create_user("*****@*****.**", is_superuser=True)
    self.create_member(teams=[self.team], user=self.user_2, organization=self.organization)
    self.user_3 = self.create_user("*****@*****.**", is_superuser=True)
    self.create_member(teams=[self.team], user=self.user_3, organization=self.organization)
    self.release.set_commits(
        [
            {
                "id": "a" * 40,
                "repository": self.repo.name,
                "author_email": self.user_2.email,
                "author_name": "joe",
                "message": "i fixed another bug",
                "patch_set": [{"path": "src/sentry/tasks/groupowner.py", "type": "M"}],
            }
        ]
    )
    assert event_2.group == self.event.group
    assert event_3.group == self.event.group

    # A second author produces a second owner row on the same group.
    self.set_release_commits(self.user_2.email)
    process_suspect_commits(event_2)
    assert GroupOwner.objects.filter(group=self.event.group).count() == 2
    assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
    assert GroupOwner.objects.filter(group=event_2.group, user=self.user_2).exists()

    self.set_release_commits(self.user_3.email)
    process_suspect_commits(event_3)
    assert GroupOwner.objects.filter(group=self.event.group).count() == 2
    assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
    assert GroupOwner.objects.filter(group=event_2.group, user=self.user_2).exists()
    assert not GroupOwner.objects.filter(group=event_2.group, user=self.user_3).exists()

    # Age out user_2's ownership so the next pass replaces it with user_3's.
    stale_owner = GroupOwner.objects.filter(group=event_2.group, user=self.user_2).first()
    stale_owner.date_added = timezone.now() - PREFERRED_GROUP_OWNER_AGE * 2
    stale_owner.save()
    self.set_release_commits(self.user_3.email)
    process_suspect_commits(event_3)
    # Won't be processed because the cache is present and this group has owners
    assert GroupOwner.objects.filter(group=self.event.group).count() == 2
    assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
    assert not GroupOwner.objects.filter(group=event_2.group, user=self.user_2).exists()
    assert GroupOwner.objects.filter(group=event_2.group, user=self.user_3).exists()
def test_transaction_vitals_filtering(self, mock_now):
    """Snapshot the vitals page with outliers excluded and with all data shown."""
    mock_now.return_value = before_now().replace(tzinfo=pytz.utc)
    vitals_path = u"/organizations/{}/performance/summary/vitals/?{}".format(
        self.org.slug,
        urlencode({"transaction": "/country_by_code/", "project": self.project.id}),
    )

    def _apply_vitals(data, value, cls_value):
        # All four timing vitals get the same value; CLS is unitless and set apart.
        for vital in ("fp", "fcp", "lcp", "fid"):
            data["measurements"][vital]["value"] = value
        data["measurements"]["cls"]["value"] = cls_value

    # Create transactions
    for seconds in range(3):
        event_data = load_data("transaction", timestamp=before_now(minutes=2))
        event_data["contexts"]["trace"]["op"] = "pageload"
        event_data["event_id"] = ("c" * 31) + hex(seconds)[2:]
        _apply_vitals(event_data, seconds * 10, seconds / 10.0)
        self.store_event(data=event_data, project_id=self.project.id)

    # add anchor point
    event_data = load_data("transaction", timestamp=before_now(minutes=1))
    event_data["contexts"]["trace"]["op"] = "pageload"
    event_data["event_id"] = "a" * 32
    _apply_vitals(event_data, 3000, 0.3)
    self.store_event(data=event_data, project_id=self.project.id)

    # add outlier
    event_data = load_data("transaction", timestamp=before_now(minutes=1))
    event_data["contexts"]["trace"]["op"] = "pageload"
    event_data["event_id"] = "b" * 32
    _apply_vitals(event_data, 3000000000, 3000000000)
    self.store_event(data=event_data, project_id=self.project.id)

    self.wait_for_event_count(self.project.id, 5)
    with self.feature(FEATURE_NAMES):
        self.browser.get(vitals_path)
        self.page.wait_until_loaded()
        self.browser.wait_until_not('[data-test-id="stats-loading"]')
        self.browser.snapshot("real user monitoring - exclude outliers")
        self.browser.element(
            xpath="//button//span[contains(text(), 'Exclude Outliers')]"
        ).click()
        self.browser.element(xpath="//li//span[contains(text(), 'View All')]").click()
        self.browser.snapshot("real user monitoring - view all data")
def setUp(self):
    """
    Span structure:

    root
        gen1-0
            gen2-0
                gen3-0
        gen1-1
            gen2-1
        gen1-2
            gen2-2
    """
    super().setUp()
    self.login_as(user=self.user)
    self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
    self.root_span_ids = [uuid4().hex[:16] for _ in range(3)]
    self.trace_id = uuid4().hex
    # Root transaction: one span per first-generation child.
    self.root_event = self.create_event(
        trace=self.trace_id,
        transaction="root",
        spans=[
            {
                "same_process_as_parent": True,
                "op": "http",
                "description": f"GET gen1-{i}",
                "span_id": root_span_id,
                "trace_id": self.trace_id,
            }
            for i, root_span_id in enumerate(self.root_span_ids)
        ],
        parent_span_id=None,
        project_id=self.project.id,
        duration=3000,
    )
    # First Generation — each child lives in its own freshly created project.
    self.gen1_span_ids = [uuid4().hex[:16] for _ in range(3)]
    self.gen1_events = [
        self.create_event(
            trace=self.trace_id,
            transaction=f"/transaction/gen1-{i}",
            spans=[
                {
                    "same_process_as_parent": True,
                    "op": "http",
                    "description": f"GET gen2-{i}",
                    "span_id": gen1_span_id,
                    "trace_id": self.trace_id,
                }
            ],
            parent_span_id=root_span_id,
            project_id=self.create_project(organization=self.organization).id,
            duration=2000,
        )
        for i, (root_span_id, gen1_span_id) in enumerate(
            zip(self.root_span_ids, self.gen1_span_ids)
        )
    ]
    # Second Generation — only gen2-0 points at an http GET for gen3.
    self.gen2_span_ids = [uuid4().hex[:16] for _ in range(3)]
    self.gen2_events = [
        self.create_event(
            trace=self.trace_id,
            transaction=f"/transaction/gen2-{i}",
            spans=[
                {
                    "same_process_as_parent": True,
                    "op": "http",
                    "description": f"GET gen3-{i}" if i == 0 else f"SPAN gen3-{i}",
                    "span_id": gen2_span_id,
                    "trace_id": self.trace_id,
                }
            ],
            parent_span_id=gen1_span_id,
            project_id=self.create_project(organization=self.organization).id,
            duration=1000,
        )
        for i, (gen1_span_id, gen2_span_id) in enumerate(
            zip(self.gen1_span_ids, self.gen2_span_ids)
        )
    ]
    # Third generation — a single leaf transaction with no spans of its own.
    self.gen3_event = self.create_event(
        trace=self.trace_id,
        transaction="/transaction/gen3-0",
        spans=[],
        project_id=self.create_project(organization=self.organization).id,
        parent_span_id=self.gen2_span_ids[0],
        duration=500,
    )
    self.url = reverse(
        self.url_name,
        kwargs={
            "organization_slug": self.project.organization.slug,
            "trace_id": self.trace_id,
        },
    )
from __future__ import absolute_import from django.utils import timezone import pytz from sentry.testutils import AcceptanceTestCase, SnubaTestCase from sentry.testutils.helpers.datetime import before_now from sentry.incidents.logic import create_incident from sentry.incidents.models import IncidentType from sentry.snuba.models import QueryAggregations FEATURE_NAME = "organizations:incidents" event_time = before_now(days=3).replace(tzinfo=pytz.utc) class OrganizationIncidentsListTest(AcceptanceTestCase, SnubaTestCase): def setUp(self): super(OrganizationIncidentsListTest, self).setUp() self.login_as(self.user) self.path = u"/organizations/{}/incidents/".format( self.organization.slug) def test_empty_incidents(self): with self.feature(FEATURE_NAME): self.browser.get(self.path) self.browser.wait_until_not(".loading-indicator") self.browser.snapshot("incidents - empty state") def test_incidents_list(self): incident = create_incident(
def setUp(self):
    """Create three events (two groups) plus every related row a deletion must cascade to."""
    super(DeleteGroupTest, self).setUp()
    self.event_id = "a" * 32
    self.event_id2 = "b" * 32
    self.event_id3 = "c" * 32
    self.project = self.create_project()
    # Two events share "group1"; the third forms "group2".
    self.event = self.store_event(
        data={
            "event_id": self.event_id,
            "tags": {"foo": "bar"},
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group1"],
        },
        project_id=self.project.id,
    )
    self.store_event(
        data={
            "event_id": self.event_id2,
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group1"],
        },
        project_id=self.project.id,
    )
    self.store_event(
        data={
            "event_id": self.event_id3,
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group2"],
        },
        project_id=self.project.id,
    )
    group = self.event.group
    # Attach one of each dependent record so the delete test can verify cleanup.
    UserReport.objects.create(
        group_id=group.id, project_id=self.event.project_id, name="With group id"
    )
    UserReport.objects.create(
        event_id=self.event.event_id, project_id=self.event.project_id, name="With event id"
    )
    EventAttachment.objects.create(
        event_id=self.event.event_id,
        project_id=self.event.project_id,
        file=File.objects.create(name="hello.png", type="image/png"),
        name="hello.png",
    )
    GroupAssignee.objects.create(group=group, project=self.project, user=self.user)
    GroupHash.objects.create(project=self.project, group=group, hash=uuid4().hex)
    GroupMeta.objects.create(group=group, key="foo", value="bar")
    GroupRedirect.objects.create(group_id=group.id, previous_group_id=1)
    self.node_id = Event.generate_node_id(self.project.id, self.event_id)
    self.node_id2 = Event.generate_node_id(self.project.id, self.event_id2)
    self.node_id3 = Event.generate_node_id(self.project.id, self.event_id3)
def create_event(self, project_id):
    """Store a minimal event, one minute in the past, for the given project.

    NOTE(review): the trailing comma wraps the stored event in a 1-tuple —
    this looks accidental; confirm callers actually expect a tuple before
    changing the return shape.
    """
    return (
        self.store_event(
            data={"timestamp": iso_format(before_now(minutes=1))},
            project_id=project_id,
        ),
    )
def test_simple(self):
    """Serialize a release both org-wide and scoped to a single project."""
    user = self.create_user()
    project = self.create_project()
    project2 = self.create_project(organization=project.organization)
    release_version = uuid4().hex
    release = Release.objects.create(
        organization_id=project.organization_id, version=release_version
    )
    release.add_project(project)
    release.add_project(project2)
    ReleaseProject.objects.filter(release=release, project=project).update(new_groups=1)
    ReleaseProject.objects.filter(release=release, project=project2).update(new_groups=1)
    self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=1)),
            "release": release_version,
            "environment": "prod",
        },
        project_id=project.id,
    )
    release = Release.objects.get(version=release_version)
    commit_author = CommitAuthor.objects.create(
        name="stebe", email="*****@*****.**", organization_id=project.organization_id
    )
    commit = Commit.objects.create(
        organization_id=project.organization_id,
        repository_id=1,
        key="abc",
        author=commit_author,
        message="waddap",
    )
    ReleaseCommit.objects.create(
        organization_id=project.organization_id,
        project_id=project.id,
        release=release,
        commit=commit,
        order=1,
    )
    release.update(authors=[str(commit_author.id)], commit_count=1, last_commit_id=commit.id)

    result = serialize(release, user)
    assert result["version"] == release.version
    # should be sum of all projects
    assert result["newGroups"] == 2
    tagvalue1 = tagstore.get_tag_value(project.id, None, "sentry:release", release_version)
    assert result["lastEvent"] == tagvalue1.last_seen
    assert result["commitCount"] == 1
    assert result["authors"] == [{"name": "stebe", "email": "*****@*****.**"}]
    assert result["version"] == release.version
    assert result["versionInfo"]["package"] is None
    assert result["versionInfo"]["version"]["raw"] == release_version
    assert result["versionInfo"]["buildHash"] == release_version
    assert result["versionInfo"]["description"] == release_version[:12]

    current_formatted_datetime = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S+00:00")
    current_project_meta = {
        "prev_release_version": "[email protected]",
        "next_release_version": "[email protected]",
        "sessions_lower_bound": current_formatted_datetime,
        "sessions_upper_bound": current_formatted_datetime,
        "first_release_version": "[email protected]",
        "last_release_version": "[email protected]",
    }
    result = serialize(
        release, user, project=project, current_project_meta=current_project_meta
    )
    # Scoped to one project, newGroups is that project's count only.
    assert result["newGroups"] == 1
    assert result["firstEvent"] == tagvalue1.first_seen
    assert result["lastEvent"] == tagvalue1.last_seen
    # The serializer camel-cases each currentProjectMeta key verbatim.
    for snake_key, camel_key in (
        ("prev_release_version", "prevReleaseVersion"),
        ("next_release_version", "nextReleaseVersion"),
        ("sessions_lower_bound", "sessionsLowerBound"),
        ("sessions_upper_bound", "sessionsUpperBound"),
        ("first_release_version", "firstReleaseVersion"),
        ("last_release_version", "lastReleaseVersion"),
    ):
        assert result["currentProjectMeta"][camel_key] == current_project_meta[snake_key]
def test_onboarding_complete(self):
    """Firing every onboarding signal marks the organization's onboarding complete."""
    now = timezone.now()
    user = self.create_user(email="*****@*****.**")
    project = self.create_project(first_event=now)
    second_project = self.create_project(first_event=now)
    second_event = self.store_event(
        data={"platform": "python", "message": "python error message"},
        project_id=second_project.id,
    )
    event = self.store_event(
        data={
            "event_id": "a" * 32,
            "platform": "javascript",
            "timestamp": iso_format(before_now(minutes=1)),
            "tags": {
                "sentry:release": "e1b5d1900526feaf20fe2bc9cad83d392136030a",
                "sentry:user": "******",
            },
            "user": {"ip_address": "0.0.0.0", "id": "41656", "email": "*****@*****.**"},
            "exception": {
                "values": [
                    {
                        "stacktrace": {
                            "frames": [
                                {
                                    "data": {
                                        "sourcemap": "https://media.sentry.io/_static/29e365f8b0d923bc123e8afa38d890c3/sentry/dist/vendor.js.map"
                                    }
                                }
                            ]
                        },
                        "type": "TypeError",
                    }
                ]
            },
        },
        project_id=project.id,
    )
    member = self.create_member(organization=self.organization, teams=[self.team], user=user)
    # Replay every signal the onboarding pipeline listens for.
    event_processed.send(project=project, event=event, sender=type(project))
    project_created.send(project=project, user=user, sender=type(project))
    project_created.send(project=second_project, user=user, sender=type(second_project))
    first_event_received.send(project=project, event=event, sender=type(project))
    first_event_received.send(
        project=second_project, event=second_event, sender=type(second_project)
    )
    member_joined.send(member=member, organization=self.organization, sender=type(member))
    plugin_enabled.send(
        plugin=IssueTrackingPlugin(),
        project=project,
        user=user,
        sender=type(IssueTrackingPlugin),
    )
    issue_tracker_used.send(
        plugin=IssueTrackingPlugin(),
        project=project,
        user=user,
        sender=type(IssueTrackingPlugin),
    )
    alert_rule_created.send(
        rule=Rule(id=1), project=self.project, user=self.user, sender=type(Rule)
    )
    assert (
        OrganizationOption.objects.filter(
            organization=self.organization, key="onboarding:complete"
        ).count()
        == 1
    )
def setUp(self):
    """Log in the test user and prepare a recent ISO timestamp."""
    super(JiraIntegrationTest, self).setUp()
    self.min_ago = iso_format(before_now(minutes=1))
    self.login_as(self.user)
def setUp(self):
    """Seed groups with distinct event counts so "top events" ordering is deterministic."""
    super(OrganizationEventsStatsTopNEvents, self).setUp()
    self.login_as(user=self.user)
    self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
    self.project = self.create_project()
    self.project2 = self.create_project()
    self.user2 = self.create_user()
    transaction_data = load_data("transaction")
    transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
    transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=4))
    # One spec per group; "count" controls how many copies get stored.
    self.event_data = [
        {
            "data": {
                "message": "poof",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "user": {"email": self.user.email},
                "fingerprint": ["group1"],
            },
            "project": self.project2,
            "count": 7,
        },
        {
            "data": {
                "message": "voof",
                "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                "fingerprint": ["group2"],
                "user": {"email": self.user2.email},
            },
            "project": self.project2,
            "count": 6,
        },
        {
            "data": {
                "message": "very bad",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "fingerprint": ["group3"],
                "user": {"email": "*****@*****.**"},
            },
            "project": self.project,
            "count": 5,
        },
        {
            "data": {
                "message": "oh no",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "fingerprint": ["group4"],
                "user": {"email": "*****@*****.**"},
            },
            "project": self.project,
            "count": 4,
        },
        {"data": transaction_data, "project": self.project, "count": 3},
        # Not in the top 5
        {
            "data": {
                "message": "sorta bad",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "fingerprint": ["group5"],
                "user": {"email": "*****@*****.**"},
            },
            "project": self.project,
            "count": 2,
        },
        {
            "data": {
                "message": "not so bad",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "fingerprint": ["group6"],
                "user": {"email": "*****@*****.**"},
            },
            "project": self.project,
            "count": 1,
        },
    ]
    self.events = []
    for spec_index, spec in enumerate(self.event_data):
        payload = spec["data"].copy()
        for copy_index in range(spec["count"]):
            # Unique 32-char event id derived from the spec and copy indices.
            payload["event_id"] = "{}{}".format(spec_index, copy_index) * 16
            stored = self.store_event(payload, project_id=spec["project"].id)
            self.events.append(stored)
    self.transaction = self.events[4]
    self.enabled_features = {
        "organizations:discover-basic": True,
    }
    self.url = reverse(
        "sentry-api-0-organization-events-stats",
        kwargs={"organization_slug": self.project.organization.slug},
    )
def setUp(self):
    """Prepare a recent ISO timestamp for stored events."""
    super(OrganizationEventsGeoEndpointTest, self).setUp()
    self.min_ago = iso_format(before_now(minutes=1))
def test_simple(self):
    """Daily time-to-resolution averages over group history, across two periods."""
    project1 = self.create_project(teams=[self.team], slug="foo")
    project2 = self.create_project(teams=[self.team], slug="bar")
    group1 = self.create_group(checksum="a" * 32, project=project1, times_seen=10)
    group2 = self.create_group(
        checksum="b" * 32, project=project2, times_seen=5, first_seen=before_now(days=20)
    )
    GroupAssignee.objects.assign(group1, self.user)
    GroupAssignee.objects.assign(group2, self.user)
    # group1: unresolved 5 days ago, resolved 2 days ago (3-day resolution).
    gh1 = self.create_group_history(
        group1,
        GroupHistoryStatus.UNRESOLVED,
        actor=self.user.actor,
        date_added=before_now(days=5),
    )
    self.create_group_history(
        group1,
        GroupHistoryStatus.RESOLVED,
        actor=self.user.actor,
        prev_history=gh1,
        date_added=before_now(days=2),
    )
    # group2: unresolved 10 days ago, resolved today (10-day resolution).
    gh2 = self.create_group_history(
        group2,
        GroupHistoryStatus.UNRESOLVED,
        actor=self.user.actor,
        date_added=before_now(days=10),
    )
    self.create_group_history(
        group2, GroupHistoryStatus.RESOLVED, actor=self.user.actor, prev_history=gh2
    )
    today = str(now().date())
    yesterday = str((now() - timedelta(days=1)).date())
    two_days_ago = str((now() - timedelta(days=2)).date())
    self.login_as(user=self.user)
    response = self.get_success_response(
        self.team.organization.slug, self.team.slug, statsPeriod="14d"
    )
    assert len(response.data) == 14
    assert response.data[today]["avg"] == timedelta(days=10).total_seconds()
    assert response.data[two_days_ago]["avg"] == timedelta(days=3).total_seconds()
    assert response.data[yesterday]["avg"] == 0
    # Lower "todays" average by adding another resolution, but this time 5 days
    # instead of 10 (avg is 7.5 now)
    gh2 = self.create_group_history(
        group2,
        GroupHistoryStatus.UNRESOLVED,
        actor=self.user.actor,
        date_added=before_now(days=5),
    )
    self.create_group_history(
        group2, GroupHistoryStatus.RESOLVED, actor=self.user.actor, prev_history=gh2
    )
    # making sure it doesnt bork anything
    self.create_group_history(
        group2, GroupHistoryStatus.DELETED, actor=self.user.actor, prev_history=gh2
    )
    # Make sure that if we have a `GroupHistory` row with no prev history then
    # we don't crash.
    self.create_group_history(group2, GroupHistoryStatus.RESOLVED, actor=self.user.actor)
    response = self.get_success_response(self.team.organization.slug, self.team.slug)
    assert len(response.data) == 90
    assert response.data[today]["avg"] == timedelta(days=11, hours=16).total_seconds()
    assert response.data[two_days_ago]["avg"] == timedelta(days=3).total_seconds()
    assert response.data[yesterday]["avg"] == 0
def setUp(self):
    """Load a baseline transaction payload and a recent ISO timestamp."""
    super(OrganizationEventsMeasurementsHistogramEndpointTest, self).setUp()
    self.min_ago = iso_format(before_now(minutes=1))
    self.data = load_data("transaction")
def setUp(self):
    """Prepare a recent ISO timestamp for stored events."""
    super(JavascriptIntegrationTest, self).setUp()
    self.min_ago = iso_format(before_now(minutes=1))
def test_resolving_inline(self):
    """Upload a ProGuard mapping with inlined frames and verify frame expansion."""
    url = reverse(
        "sentry-api-0-dsym-files",
        kwargs={
            "organization_slug": self.project.organization.slug,
            "project_slug": self.project.slug,
        },
    )
    self.login_as(user=self.user)
    # Build an in-memory zip holding the mapping file plus a file to be ignored.
    out = BytesIO()
    archive = zipfile.ZipFile(out, "w")
    archive.writestr("proguard/%s.txt" % PROGUARD_INLINE_UUID, PROGUARD_INLINE_SOURCE)
    archive.writestr("ignored-file.txt", b"This is just some stuff")
    archive.close()
    response = self.client.post(
        url,
        {
            "file": SimpleUploadedFile(
                "symbols.zip", out.getvalue(), content_type="application/zip"
            )
        },
        format="multipart",
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1
    event_data = {
        "user": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {"images": [{"type": "proguard", "uuid": PROGUARD_INLINE_UUID}]},
        "exception": {
            "values": [
                {
                    "stacktrace": {
                        "frames": [
                            {
                                "function": "onClick",
                                "abs_path": None,
                                "module": "e.a.c.a",
                                "filename": None,
                                "lineno": 2,
                            },
                            {
                                "function": "t",
                                "abs_path": None,
                                "module": "io.sentry.sample.MainActivity",
                                "filename": "MainActivity.java",
                                "lineno": 1,
                            },
                        ]
                    },
                    "module": "org.a.b",
                    "type": "g$a",
                    "value": "Shit broke yo",
                }
            ]
        },
        "timestamp": iso_format(before_now(seconds=1)),
    }
    event = self.post_and_retrieve_event(event_data)
    if not self.use_relay():
        # We measure the number of queries after an initial post,
        # because there are many queries polluting the array
        # before the actual "processing" happens (like, auth_user)
        with self.assertWriteQueries(
            {
                "nodestore_node": 2,
                "sentry_eventuser": 1,
                "sentry_groupedmessage": 1,
                "sentry_userreport": 1,
            }
        ):
            self.post_and_retrieve_event(event_data)
    exc = event.interfaces["exception"].values[0]
    frames = exc.stacktrace.frames
    # The two obfuscated frames expand into four deobfuscated ones.
    assert len(frames) == 4
    assert frames[0].function == "onClick"
    assert frames[0].module == "io.sentry.sample.-$$Lambda$r3Avcbztes2hicEObh02jjhQqd4"
    assert frames[1].filename == "MainActivity.java"
    assert frames[1].module == "io.sentry.sample.MainActivity"
    assert frames[1].function == "onClickHandler"
    assert frames[1].lineno == 40
    assert frames[2].function == "foo"
    assert frames[2].lineno == 44
    assert frames[3].function == "bar"
    assert frames[3].lineno == 54
    assert frames[3].filename == "MainActivity.java"
    assert frames[3].module == "io.sentry.sample.MainActivity"
def test_sourcemap_expansion(self):
    """Minified JS frames get expanded to original source via the fetched sourcemap."""
    # Serve the script, its minified form, and the sourcemap over mocked HTTP.
    responses.add(
        responses.GET,
        "http://example.com/test.js",
        body=load_fixture("test.js"),
        content_type="application/javascript",
    )
    responses.add(
        responses.GET,
        "http://example.com/test.min.js",
        body=load_fixture("test.min.js"),
        content_type="application/javascript",
    )
    responses.add(
        responses.GET,
        "http://example.com/test.map",
        body=load_fixture("test.map"),
        content_type="application/json",
    )
    responses.add(responses.GET, "http://example.com/index.html", body="Not Found", status=404)
    min_ago = iso_format(before_now(minutes=1))
    data = {
        "timestamp": min_ago,
        "message": "hello",
        "platform": "javascript",
        "exception": {
            "values": [
                {
                    "type": "Error",
                    "stacktrace": {
                        # Fixture frames are newest-first; reverse to match event order.
                        "frames": json.loads(load_fixture("minifiedError.json"))[::-1]
                    },
                }
            ]
        },
    }
    event = self.post_and_retrieve_event(data)
    exception = event.interfaces["exception"]
    frame_list = exception.values[0].stacktrace.frames
    assert len(frame_list) == 4
    # (function, lineno, filename) per expanded frame, outermost first.
    expected_frames = [
        ("produceStack", 6, "index.html"),
        ("test", 20, "test.js"),
        ("invoke", 15, "test.js"),
        ("onFailure", 5, "test.js"),
    ]
    for frame, (function, lineno, filename) in zip(frame_list, expected_frames):
        assert frame.function == function
        assert frame.lineno == lineno
        assert frame.filename == filename
def test_error_on_resolving(self):
    """A mapping file with missing line info yields a processing error on the event."""
    url = reverse(
        "sentry-api-0-dsym-files",
        kwargs={
            "organization_slug": self.project.organization.slug,
            "project_slug": self.project.slug,
        },
    )
    self.login_as(user=self.user)
    # Upload a deliberately broken ProGuard mapping.
    out = BytesIO()
    archive = zipfile.ZipFile(out, "w")
    archive.writestr("proguard/%s.txt" % PROGUARD_BUG_UUID, PROGUARD_BUG_SOURCE)
    archive.close()
    response = self.client.post(
        url,
        {
            "file": SimpleUploadedFile(
                "symbols.zip", out.getvalue(), content_type="application/zip"
            )
        },
        format="multipart",
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1
    event_data = {
        "user": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {"images": [{"type": "proguard", "uuid": PROGUARD_BUG_UUID}]},
        "exception": {
            "values": [
                {
                    "stacktrace": {
                        "frames": [
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 67,
                            },
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 69,
                            },
                        ]
                    },
                    "type": "RuntimeException",
                    "value": "Shit broke yo",
                }
            ]
        },
        "timestamp": iso_format(before_now(seconds=1)),
    }
    event = self.post_and_retrieve_event(event_data)
    assert len(event.data["errors"]) == 1
    assert event.data["errors"][0] == {
        "mapping_uuid": "071207ac-b491-4a74-957c-2c94fd9594f2",
        "type": "proguard_missing_lineno",
    }
def setUp(self):
    """Build teams, members, events, and an ownership schema mapping paths/URLs to owners."""
    super().setUp()
    self.user1 = self.create_user()
    self.user2 = self.create_user()
    self.user3 = self.create_user()
    self.user4 = self.create_user()
    self.user5 = self.create_user()  # this user has no events
    self.user_ids = [self.user1.id, self.user2.id, self.user3.id, self.user4.id, self.user5.id]
    self.team1 = self.create_team()
    self.team2 = self.create_team()
    self.team3 = self.create_team()
    self.project = self.create_project(
        teams=[self.team1, self.team2, self.team3], fire_project_created=True
    )
    self.create_member(user=self.user1, organization=self.organization, teams=[self.team1])
    self.create_member(user=self.user2, organization=self.organization, teams=[self.team2])
    self.create_member(
        user=self.user3, organization=self.organization, teams=[self.team1, self.team2]
    )
    self.create_member(user=self.user4, organization=self.organization, teams=[self.team3])
    self.create_member(user=self.user5, organization=self.organization, teams=[self.team3])
    # team1 owns *.py files, team2 owns *.cbl files (see matchers below).
    self.team1_events = self.create_events_from_filenames(
        self.project, ["hello.py", "goodbye.py", "hola.py", "adios.py"]
    )
    self.team2_events = self.create_events_from_filenames(
        self.project, ["old.cbl", "retro.cbl", "cool.cbl", "gem.cbl"]
    )
    # user4's events match by request URL rather than by file path.
    self.user4_events = [
        self.store_event(
            data={
                "stacktrace": {"frames": [{"lineno": 1, "filename": "foo.bar"}]},
                "request": {"url": "helloworld.org"},
                "timestamp": iso_format(before_now(minutes=1)),
                "fingerprint": ["user4group1"],
            },
            project_id=self.project.id,
        ),
        self.store_event(
            data={
                "stacktrace": {"frames": [{"lineno": 1, "filename": "bar.foo"}]},
                "request": {"url": "helloworld.org"},
                "timestamp": iso_format(before_now(minutes=1)),
                "fingerprint": ["user4group2"],
            },
            project_id=self.project.id,
        ),
    ]
    self.team1_matcher = Matcher("path", "*.py")
    self.team2_matcher = Matcher("path", "*.cbl")
    self.user4_matcher = Matcher("url", "*.org")
    self.project_ownership = ProjectOwnership.objects.create(
        project_id=self.project.id,
        schema=dump_schema(
            [
                Rule(
                    self.team1_matcher,
                    [Owner("team", self.team1.slug), Owner("user", self.user3.email)],
                ),
                Rule(self.team2_matcher, [Owner("team", self.team2.slug)]),
                Rule(self.user4_matcher, [Owner("user", self.user4.email)]),
            ]
        ),
        fallthrough=True,
    )
def test_basic_resolving(self):
    """Upload a ProGuard mapping and verify frames, exception type, and culprit deobfuscate."""
    url = reverse(
        "sentry-api-0-dsym-files",
        kwargs={
            "organization_slug": self.project.organization.slug,
            "project_slug": self.project.slug,
        },
    )
    self.login_as(user=self.user)
    # Zip up the mapping file plus a file the endpoint should ignore.
    out = BytesIO()
    archive = zipfile.ZipFile(out, "w")
    archive.writestr("proguard/%s.txt" % PROGUARD_UUID, PROGUARD_SOURCE)
    archive.writestr("ignored-file.txt", b"This is just some stuff")
    archive.close()
    response = self.client.post(
        url,
        {
            "file": SimpleUploadedFile(
                "symbols.zip", out.getvalue(), content_type="application/zip"
            )
        },
        format="multipart",
    )
    assert response.status_code == 201, response.content
    assert len(response.data) == 1
    event_data = {
        "user": {"ip_address": "31.172.207.97"},
        "extra": {},
        "project": self.project.id,
        "platform": "java",
        "debug_meta": {"images": [{"type": "proguard", "uuid": PROGUARD_UUID}]},
        "exception": {
            "values": [
                {
                    "stacktrace": {
                        "frames": [
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 67,
                            },
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 69,
                            },
                        ]
                    },
                    "module": "org.a.b",
                    "type": "g$a",
                    "value": "Shit broke yo",
                }
            ]
        },
        "timestamp": iso_format(before_now(seconds=1)),
    }
    event = self.post_and_retrieve_event(event_data)
    if not self.use_relay():
        # We measure the number of queries after an initial post,
        # because there are many queries polluting the array
        # before the actual "processing" happens (like, auth_user)
        with self.assertWriteQueries(
            {
                "nodestore_node": 2,
                "sentry_eventuser": 1,
                "sentry_groupedmessage": 1,
                "sentry_userreport": 1,
            }
        ):
            self.post_and_retrieve_event(event_data)
    exc = event.interfaces["exception"].values[0]
    frames = exc.stacktrace.frames
    assert exc.type == "Util$ClassContextSecurityManager"
    assert exc.module == "org.slf4j.helpers"
    assert frames[0].function == "getClassContext"
    assert frames[0].module == "org.slf4j.helpers.Util$ClassContextSecurityManager"
    assert frames[1].function == "getExtraClassContext"
    assert frames[1].module == "org.slf4j.helpers.Util$ClassContextSecurityManager"
    assert event.culprit == (
        "org.slf4j.helpers.Util$ClassContextSecurityManager " "in getExtraClassContext"
    )
def test_usage(self, mock_now): mock_now.return_value = before_now().replace(tzinfo=pytz.utc)
"frames": [ { "platform": "foobar", "function": "hi" }, { "function": "unknown", "instruction_addr": "0x0000000100000fa0" }, ] }, "type": "Fail", "value": "fail", }] }, "timestamp": iso_format(before_now(seconds=1)), } class SymbolicatorResolvingIntegrationTest(RelayStoreHelper, TransactionTestCase): # For these tests to run, write `symbolicator.enabled: true` into your # `~/.sentry/config.yml` and run `sentry devservices up` @pytest.fixture(autouse=True) def initialize(self, live_server): self.project.update_option("sentry:builtin_symbol_sources", []) new_prefix = live_server.url with patch("sentry.auth.system.is_internal_ip", return_value=True), self.options(
def setUp(self):
    """Store three default events and two transactions across two projects."""
    super(SnubaEventStorageTest, self).setUp()
    self.min_ago = iso_format(before_now(minutes=1))
    self.two_min_ago = iso_format(before_now(minutes=2))
    self.project1 = self.create_project()
    self.project2 = self.create_project()
    # Events a/b share "group1"; event c forms "group2".
    self.event1 = self.store_event(
        data={
            "event_id": "a" * 32,
            "type": "default",
            "platform": "python",
            "fingerprint": ["group1"],
            "timestamp": self.two_min_ago,
            "tags": {"foo": "1"},
        },
        project_id=self.project1.id,
    )
    self.event2 = self.store_event(
        data={
            "event_id": "b" * 32,
            "type": "default",
            "platform": "python",
            "fingerprint": ["group1"],
            "timestamp": self.min_ago,
            "tags": {"foo": "1"},
        },
        project_id=self.project2.id,
    )
    self.event3 = self.store_event(
        data={
            "event_id": "c" * 32,
            "type": "default",
            "platform": "python",
            "fingerprint": ["group2"],
            "timestamp": self.min_ago,
            "tags": {"foo": "1"},
        },
        project_id=self.project2.id,
    )
    # Two transaction events with distinct ids and timestamps.
    event_data = load_data("transaction")
    event_data["timestamp"] = iso_format(before_now(minutes=1))
    event_data["start_timestamp"] = iso_format(before_now(minutes=1, seconds=1))
    event_data["event_id"] = "d" * 32
    self.transaction_event = self.store_event(data=event_data, project_id=self.project2.id)
    event_data_2 = load_data("transaction")
    event_data_2["timestamp"] = iso_format(before_now(seconds=30))
    event_data_2["start_timestamp"] = iso_format(before_now(seconds=31))
    event_data_2["event_id"] = "e" * 32
    self.transaction_event_2 = self.store_event(data=event_data_2, project_id=self.project2.id)
    self.eventstore = SnubaEventStorage()
def test_debug_id_resolving(self):
    """Upload a breakpad symbol file for a Windows debug image, post a
    native event whose frame falls inside that image, and assert the
    crashing frame symbolicates to ``main`` (full stacktrace snapshotted).

    Redundant Python 2 ``u""`` string prefixes were removed; the literal
    values are unchanged.
    """
    file = File.objects.create(
        name="crash.pdb", type="default", headers={"Content-Type": "text/x-breakpad"}
    )
    path = get_fixture_path("windows.sym")
    with open(path, "rb") as f:
        file.putfile(f)
    ProjectDebugFile.objects.create(
        file=file,
        object_name="crash.pdb",
        cpu_name="x86",
        project=self.project,
        debug_id="3249d99d-0c40-4931-8610-f4e4fb0b6936-1",
        code_id="5AB380779000",
    )
    self.login_as(user=self.user)
    event_data = {
        "contexts": {
            "device": {"arch": "x86"},
            "os": {"build": "", "name": "Windows", "type": "os", "version": "10.0.14393"},
        },
        "debug_meta": {
            "images": [
                {
                    # Must match the debug_id of the uploaded ProjectDebugFile above.
                    "id": "3249d99d-0c40-4931-8610-f4e4fb0b6936-1",
                    "image_addr": "0x2a0000",
                    "image_size": 36864,
                    "name": "C:\\projects\\breakpad-tools\\windows\\Release\\crash.exe",
                    "type": "symbolic",
                }
            ]
        },
        "exception": {
            "stacktrace": {
                "frames": [
                    {
                        # instruction_addr lies inside [image_addr, image_addr + image_size).
                        "function": "<unknown>",
                        "instruction_addr": "0x2a2a3d",
                        "package": "C:\\projects\\breakpad-tools\\windows\\Release\\crash.exe",
                    }
                ]
            },
            "thread_id": 1636,
            "type": "EXCEPTION_ACCESS_VIOLATION_WRITE",
            "value": "Fatal Error: EXCEPTION_ACCESS_VIOLATION_WRITE",
        },
        "platform": "native",
        "timestamp": iso_format(before_now(seconds=1)),
    }
    event = self.post_and_retrieve_event(event_data)
    assert event.data["culprit"] == "main"
    insta_snapshot_stacktrace_data(self, event.data)
def test_notify_with_suspect_commits(self):
    """The notification mail for an event whose release contains a commit
    touching a frame's file should include a "Suspect Commits" section."""
    repository = Repository.objects.create(
        organization_id=self.organization.id, name=self.organization.id
    )
    release = self.create_release(project=self.project, version="v12")
    release.set_commits(
        [
            {
                "id": "a" * 40,
                "repository": repository.name,
                "author_email": "*****@*****.**",
                "author_name": "Bob",
                "message": "i fixed a bug",
                "patch_set": [{"path": "src/sentry/models/release.py", "type": "M"}],
            }
        ]
    )
    # The second (deepest in-app) frame points at the file the commit modified.
    frames = [
        {
            "function": "handle_set_commits",
            "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
            "module": "sentry.tasks",
            "in_app": True,
            "lineno": 30,
            "filename": "sentry/tasks.py",
        },
        {
            "function": "set_commits",
            "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
            "module": "sentry.models.release",
            "in_app": True,
            "lineno": 39,
            "filename": "sentry/models/release.py",
        },
    ]
    event = self.store_event(
        data={
            "message": "Kaboom!",
            "platform": "python",
            "timestamp": iso_format(before_now(seconds=1)),
            "stacktrace": {"frames": frames},
            "tags": {"sentry:release": release.version},
        },
        project_id=self.project.id,
    )
    with self.tasks():
        self.plugin.notify(Notification(event=event))
    assert len(mail.outbox) >= 1
    message = mail.outbox[-1]
    assert "Suspect Commits" in message.body
def setUp(self):
    """Run the base fixtures, then stash a datetime from one minute ago."""
    super().setUp()
    self.min_ago = before_now(minutes=1)
def test(self):
    """Rule fire history is aggregated per group, ordered by fire count,
    paginated via the returned cursor, and filtered by the date window."""
    history = []
    rule = Rule.objects.create(project=self.event.project)
    # self.group fires once on each of the last 3 days (days 1, 2, 3 ago).
    for i in range(3):
        history.append(
            RuleFireHistory(
                project=rule.project,
                rule=rule,
                group=self.group,
                date_added=before_now(days=i + 1),
            )
        )
    # group_2 fires once (1 day ago).
    group_2 = self.create_group()
    history.append(
        RuleFireHistory(
            project=rule.project, rule=rule, group=group_2, date_added=before_now(days=1)
        )
    )
    # group_3 fires twice (days 1 and 2 ago).
    group_3 = self.create_group()
    for i in range(2):
        history.append(
            RuleFireHistory(
                project=rule.project,
                rule=rule,
                group=group_3,
                date_added=before_now(days=i + 1),
            )
        )
    # History for a different rule must not leak into results for `rule`.
    rule_2 = Rule.objects.create(project=self.event.project)
    history.append(
        RuleFireHistory(
            project=rule.project, rule=rule_2, group=self.group, date_added=before_now(days=0)
        )
    )
    RuleFireHistory.objects.bulk_create(history)
    base_triggered_date = before_now(days=1).replace(tzinfo=pytz.UTC)
    # Full 6-day window: counts 3 / 2 / 1, most-fired group first.
    self.run_test(
        rule,
        before_now(days=6),
        before_now(days=0),
        [
            RuleGroupHistory(self.group, count=3, last_triggered=base_triggered_date),
            RuleGroupHistory(group_3, count=2, last_triggered=base_triggered_date),
            RuleGroupHistory(group_2, count=1, last_triggered=base_triggered_date),
        ],
    )
    # Same window paged one item at a time, chaining each response's cursor.
    result = self.run_test(
        rule,
        before_now(days=6),
        before_now(days=0),
        [
            RuleGroupHistory(self.group, count=3, last_triggered=base_triggered_date),
        ],
        per_page=1,
    )
    result = self.run_test(
        rule,
        before_now(days=6),
        before_now(days=0),
        [
            RuleGroupHistory(group_3, count=2, last_triggered=base_triggered_date),
        ],
        cursor=result.next,
        per_page=1,
    )
    self.run_test(
        rule,
        before_now(days=6),
        before_now(days=0),
        [
            RuleGroupHistory(group_2, count=1, last_triggered=base_triggered_date),
        ],
        cursor=result.next,
        per_page=1,
    )
    # One-day window: each group is expected with exactly one fire.
    self.run_test(
        rule,
        before_now(days=1),
        before_now(days=0),
        [
            RuleGroupHistory(self.group, count=1, last_triggered=base_triggered_date),
            RuleGroupHistory(group_2, count=1, last_triggered=base_triggered_date),
            RuleGroupHistory(group_3, count=1, last_triggered=base_triggered_date),
        ],
    )
    # Narrow older window (3 to 2 days ago): expects a single self.group
    # entry with last_triggered shifted back two days.
    self.run_test(
        rule,
        before_now(days=3),
        before_now(days=2),
        [
            RuleGroupHistory(
                self.group, count=1, last_triggered=base_triggered_date - timedelta(days=2)
            ),
        ],
    )
def setUp(self):
    """Run the base fixtures, record an ISO timestamp one minute back, and log in."""
    super().setUp()
    self.min_ago = iso_format(before_now(minutes=1))
    self.login_as(self.user)
def generate_transaction(trace=None, span=None):
    """Build transaction event data with a deterministic tree of spans.

    The tree shape is given by ``span_tree_blueprint`` (dict values are
    subtrees; a string value is a single leaf child) and each span's
    start offset/duration comes from ``time_offsets``.

    :param trace: optional trace id forwarded to ``load_data``.
    :param span: optional root span id forwarded to ``load_data``.
    :returns: the event data dict with ``event_data["spans"]`` populated.
    """
    end_datetime = before_now(minutes=1)
    start_datetime = end_datetime - timedelta(milliseconds=500)
    event_data = load_data(
        "transaction",
        timestamp=end_datetime,
        start_timestamp=start_datetime,
        trace=trace,
        span_id=span,
    )
    event_data.update({"event_id": "a" * 32})
    # generate and build up span tree
    # All generated spans are deep copies of the first span from load_data,
    # with ids and timestamps overwritten below.
    reference_span = event_data["spans"][0]
    parent_span_id = reference_span["parent_span_id"]
    span_tree_blueprint = {
        "a": {},
        "b": {"bb": {"bbb": {"bbbb": "bbbbb"}}},
        "c": {},
        "d": {},
        "e": {},
    }
    # span id -> (offset from transaction start, span duration)
    time_offsets = {
        "a": (timedelta(), timedelta(milliseconds=10)),
        "b": (timedelta(milliseconds=120), timedelta(milliseconds=250)),
        "bb": (timedelta(milliseconds=130), timedelta(milliseconds=10)),
        "bbb": (timedelta(milliseconds=140), timedelta(milliseconds=10)),
        "bbbb": (timedelta(milliseconds=150), timedelta(milliseconds=10)),
        "bbbbb": (timedelta(milliseconds=160), timedelta(milliseconds=90)),
        "c": (timedelta(milliseconds=260), timedelta(milliseconds=100)),
        "d": (timedelta(milliseconds=375), timedelta(milliseconds=50)),
        "e": (timedelta(milliseconds=400), timedelta(milliseconds=100)),
    }

    def build_span_tree(span_tree, spans, parent_span_id):
        # Children are visited in sorted-key order so output is deterministic.
        for span_id, child in sorted(span_tree.items(), key=lambda item: item[0]):
            span = copy.deepcopy(reference_span)
            # non-leaf node span
            # Blueprint keys are short; pad to the 16-char span id format.
            span["parent_span_id"] = parent_span_id.ljust(16, "0")
            span["span_id"] = span_id.ljust(16, "0")
            (start_delta, span_length) = time_offsets.get(span_id, (timedelta(), timedelta()))
            span_start_time = start_datetime + start_delta
            span["start_timestamp"] = timestamp_format(span_start_time)
            span["timestamp"] = timestamp_format(span_start_time + span_length)
            spans.append(span)
            if isinstance(child, dict):
                # Recurse: current node becomes the parent of its subtree.
                spans = build_span_tree(child, spans, span_id)
            elif isinstance(child, str):
                # A string child is a single leaf; reuse the loop variables
                # so the same append logic below emits it.
                parent_span_id = span_id
                span_id = child
                span = copy.deepcopy(reference_span)
                # leaf node span
                span["parent_span_id"] = parent_span_id.ljust(16, "0")
                span["span_id"] = span_id.ljust(16, "0")
                (start_delta, span_length) = time_offsets.get(span_id, (timedelta(), timedelta()))
                span_start_time = start_datetime + start_delta
                span["start_timestamp"] = timestamp_format(span_start_time)
                span["timestamp"] = timestamp_format(span_start_time + span_length)
                spans.append(span)
        return spans

    event_data["spans"] = build_span_tree(span_tree_blueprint, [], parent_span_id)
    return event_data
def test_post_with_invalid_origin(self):
    """A store request from a referer outside the configured origins is rejected with 403."""
    self.project.update_option("sentry:origins", "sentry.io")
    payload = {"message": "hello", "timestamp": iso_format(before_now(seconds=1))}
    response = self._postWithReferer(payload, referer="https://getsentry.net", protocol="4")
    assert response.status_code == 403, response.content