def test_simple(self):
    """Deleting a group removes its DB rows and every event payload in nodestore."""
    project = self.create_project()
    event_ids = ["a" * 32, "b" * 32]
    node_ids = [Event.generate_node_id(project.id, eid) for eid in event_ids]
    # Store two events into the same group; keep the first for its group handle.
    stored = [
        self.store_event(
            data={
                "event_id": eid,
                "timestamp": iso_format(before_now(minutes=1)),
                "fingerprint": ["group1"],
            },
            project_id=project.id,
        )
        for eid in event_ids
    ]
    event = stored[0]
    group = event.group
    group.update(status=GroupStatus.PENDING_DELETION)
    # Attach related rows that the deletion must cascade through.
    GroupAssignee.objects.create(group=group, project=project, user=self.user)
    GroupHash.objects.create(project=project, group=group, hash=uuid4().hex)
    GroupMeta.objects.create(group=group, key="foo", value="bar")
    GroupRedirect.objects.create(group_id=group.id, previous_group_id=1)
    for nid in node_ids:
        assert nodestore.get(nid)
    with self.tasks():
        delete_groups(object_ids=[group.id])
    assert not Event.objects.filter(id=event.id).exists()
    assert not GroupRedirect.objects.filter(group_id=group.id).exists()
    assert not GroupHash.objects.filter(group_id=group.id).exists()
    assert not Group.objects.filter(id=group.id).exists()
    for nid in node_ids:
        assert not nodestore.get(nid)
def _get_event_from_storage(self, project_id, event_id):
    """Fetch a single event, routing a sampled fraction of reads through nodestore.

    Reads `store.nodestore-sample-rate` and, for that fraction of calls,
    looks the event up in nodestore (timing the read); otherwise falls back
    to the Django `Event` table. Returns an `Event` or `None` if not found.
    """
    # Fraction of reads to send to nodestore; a random draw per call decides.
    nodestore_sample_rate = options.get("store.nodestore-sample-rate")
    use_nodestore = random.random() < nodestore_sample_rate
    if use_nodestore:
        start = time.time()
        node_data = nodestore.get(
            Event.generate_node_id(project_id, event_id))
        # Report read latency in ms, tagged by whether the payload existed.
        metrics.timing(
            "events.store.nodestore.duration",
            int((time.time() - start) * 1000),
            tags={"duplicate_found": bool(node_data)},
        )
        if node_data:
            return Event(node_data)
        # NOTE(review): when the nodestore path is sampled but misses, there
        # is no database fallback and the function returns None — confirm
        # this is intentional (e.g. a deliberate sampling experiment).
    else:
        try:
            event = Event.objects.get(project_id=project_id, event_id=event_id)
            return event
        except Event.DoesNotExist:
            pass
    return None
def _get_event_instance(self, project_id=None):
    """Build an unsaved event object from the normalized payload.

    Depending on the `store.use-django-event` option, returns either a
    Django `Event` model instance or an eventstore-backed event.
    """
    payload = self._data
    event_id = payload.get("event_id")
    if options.get("store.use-django-event"):
        # Convert the recorded epoch timestamp into an aware UTC datetime.
        occurred_at = datetime.fromtimestamp(payload.get("timestamp"))
        occurred_at = occurred_at.replace(tzinfo=timezone.utc)
        payload["node_id"] = Event.generate_node_id(project_id, event_id)
        return Event(
            project_id=project_id or self._project.id,
            event_id=event_id,
            data=EventDict(payload, skip_renormalization=True),
            time_spent=payload.get("time_spent"),
            datetime=occurred_at,
            platform=payload.get("platform"),
        )
    return eventstore.create_event(
        project_id=project_id or self._project.id,
        event_id=event_id,
        group_id=None,
        data=EventDict(payload, skip_renormalization=True),
    )
def test_simple(self):
    """Running a scheduled event deletion removes its attachment, user report, and node payload."""
    project = self.create_project()
    event_id = "a" * 32
    node_id = Event.generate_node_id(project.id, event_id)
    group = self.create_group(project=project)
    event = self.create_event(group=group, event_id=event_id)
    # Related rows that must be cleaned up alongside the event.
    attachment_file = File.objects.create(name="hello.png", type="image/png")
    EventAttachment.objects.create(
        event_id=event.event_id,
        project_id=event.project_id,
        file=attachment_file,
        name="hello.png",
    )
    UserReport.objects.create(
        event_id=event.event_id, project_id=event.project_id, name="Jane Bloggs"
    )
    assert nodestore.get(node_id) is not None
    deletion = ScheduledDeletion.schedule(event, days=0)
    deletion.update(in_progress=True)
    with self.tasks():
        run_deletion(deletion.id)
    assert not Event.objects.filter(id=event.id).exists()
    assert not EventAttachment.objects.filter(
        event_id=event.event_id, project_id=project.id
    ).exists()
    assert not UserReport.objects.filter(
        event_id=event.event_id, project_id=project.id
    ).exists()
    assert nodestore.get(node_id) is None
def create_event(group=None, project=None, event_id=None, normalize=True, **kwargs):
    """Legacy test factory: build and save an `Event` (plus its `EventMapping`).

    Accepts either an explicit `project` or infers it from `group`; remaining
    kwargs are folded into the event payload. When `normalize` is true the
    payload is run through `EventManager` just like real ingestion.
    """
    # XXX: Do not use this method for new tests! Prefer `store_event`.
    if event_id is None:
        event_id = uuid4().hex
    kwargs.setdefault('project', project if project else group.project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    # `tags` / `stacktrace` / `user` are convenience kwargs that belong inside
    # the event payload, not on the model constructor — move them over.
    if kwargs.get('tags'):
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace
    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user
    kwargs['data'].setdefault('errors', [{
        'type': EventError.INVALID_DATA,
        'name': 'foobar',
    }])
    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs['message'] or '<unlabeled event>',
        }
    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']))
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['data'].update(manager.materialize_metadata())
        kwargs['message'] = manager.get_search_message()
    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs['data'].setdefault(
        'node_id',
        Event.generate_node_id(kwargs['project'].id, event_id))
    event = Event(event_id=event_id, group=group, **kwargs)
    if group:
        EventMapping.objects.create(
            project_id=event.project.id,
            event_id=event_id,
            group=group,
        )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def create_event(group, event_id=None, normalize=True, **kwargs):
    """Legacy test factory: build and save an `Event` for `group` (plus its
    `EventMapping`).

    Remaining kwargs are folded into the event payload; when `normalize` is
    true the payload is run through `EventManager` like real ingestion.
    """
    # XXX: Do not use this method for new tests! Prefer `store_event`.
    if event_id is None:
        event_id = uuid4().hex
    kwargs.setdefault('project', group.project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    # `tags` / `stacktrace` / `user` are convenience kwargs that belong inside
    # the event payload, not on the model constructor — move them over.
    if kwargs.get('tags'):
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace
    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user
    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )
    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs['message'] or '<unlabeled event>',
        }
    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']))
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['data'].update(manager.materialize_metadata())
        kwargs['message'] = manager.get_search_message()
    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs['data'].setdefault(
        'node_id',
        Event.generate_node_id(kwargs['project'].id, event_id)
    )
    event = Event(event_id=event_id, group=group, **kwargs)
    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def create_event(group=None, project=None, event_id=None, normalize=True, **kwargs):
    """Legacy test factory: build and save an `Event` and persist its node data.

    Accepts either an explicit `project` or infers it from `group`; remaining
    kwargs are folded into the event payload. When `normalize` is true the
    payload is run through `EventManager` just like real ingestion.
    """
    # XXX: Do not use this method for new tests! Prefer `store_event`.
    if event_id is None:
        event_id = uuid4().hex
    kwargs.setdefault("project", project if project else group.project)
    kwargs.setdefault("data", copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault("platform", kwargs["data"].get("platform", "python"))
    kwargs.setdefault("message", kwargs["data"].get("message", "message"))
    # `tags` / `stacktrace` / `user` are convenience kwargs that belong inside
    # the event payload, not on the model constructor — move them over.
    if kwargs.get("tags"):
        tags = kwargs.pop("tags")
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs["data"]["tags"] = tags
    if kwargs.get("stacktrace"):
        stacktrace = kwargs.pop("stacktrace")
        kwargs["data"]["stacktrace"] = stacktrace
    user = kwargs.pop("user", None)
    if user is not None:
        kwargs["data"]["user"] = user
    kwargs["data"].setdefault("errors", [{
        "type": EventError.INVALID_DATA,
        "name": "foobar"
    }])
    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if "logentry" not in kwargs["data"]:
        kwargs["data"]["logentry"] = {
            "message": kwargs["message"] or "<unlabeled event>"
        }
    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs["data"]))
        manager.normalize()
        kwargs["data"] = manager.get_data()
        kwargs["data"].update(manager.materialize_metadata())
        kwargs["message"] = manager.get_search_message()
    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs["data"].setdefault(
        "node_id",
        Event.generate_node_id(kwargs["project"].id, event_id))
    event = Event(event_id=event_id, group=group, **kwargs)
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    # Explicitly persist the node payload as well (no EventMapping variant).
    event.data.save()
    return event
def test_simple(self):
    """A message captured by the internal client ends up in nodestore with the right fields."""
    configure_sdk()
    Hub.current.bind_client(Hub.main.client)
    with self.tasks():
        event_id = raven.captureMessage("internal client test")
    node_id = Event.generate_node_id(settings.SENTRY_PROJECT, event_id)
    stored = nodestore.get(node_id)
    assert stored["project"] == settings.SENTRY_PROJECT
    assert stored["event_id"] == event_id
    assert stored["logentry"]["formatted"] == "internal client test"
def get_child_relations_bulk(self, instance_list):
    """Return a single bulk relation that deletes the nodestore payloads
    for every event in `instance_list`."""
    from sentry.models import Event

    node_ids = [
        Event.generate_node_id(instance.project_id, instance.event_id)
        for instance in instance_list
    ]
    for instance in instance_list:
        # Unbind the NodeField so it doesn't attempt to get deleted a
        # second time after NodeDeletionTask runs, when the Event itself
        # is deleted.
        instance.data = None
    return [BaseRelation({"nodes": node_ids}, NodeDeletionTask)]
def test_dupe_message_id(self, eventstream_insert):
    """Saving the same event_id twice overwrites nodestore and re-inserts into the eventstream."""
    project_id = 1
    event_id = "a" * 32
    node_id = Event.generate_node_id(project_id, event_id)
    # Save twice with the same id but different messages; the stored
    # payload should reflect the latest save each time.
    for expected_message in ("first", "second"):
        manager = EventManager(make_event(event_id=event_id, message=expected_message))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.get(node_id)["logentry"]["formatted"] == expected_message
    assert eventstream_insert.call_count == 2
def test_encoding(self):
    """Non-JSON-serializable `extra` values are coerced to a string form before storage."""
    configure_sdk()
    Hub.current.bind_client(Hub.main.client)

    class NotJSONSerializable:
        pass

    with self.tasks():
        event_id = raven.captureMessage(
            "check the req", extra={"request": NotJSONSerializable()}
        )
    stored = nodestore.get(
        Event.generate_node_id(settings.SENTRY_PROJECT, event_id)
    )
    assert stored["project"] == settings.SENTRY_PROJECT
    assert stored["logentry"]["formatted"] == "check the req"
    # The class name survives in the stringified extra payload.
    assert "NotJSONSerializable" in stored["extra"]["request"]
def test_simple(self):
    """Running a scheduled event deletion removes its attachment, user report,
    event tags, and nodestore payload."""
    event_id = "a" * 32
    project = self.create_project()
    node_id = Event.generate_node_id(project.id, event_id)
    group = self.create_group(project=project)
    event = self.create_event(group=group, event_id=event_id)
    # Related rows that must be cleaned up alongside the event.
    EventAttachment.objects.create(
        event_id=event.event_id,
        project_id=event.project_id,
        file=File.objects.create(name="hello.png", type="image/png"),
        name="hello.png",
    )
    UserReport.objects.create(
        event_id=event.event_id, project_id=event.project_id, name="Jane Doe")
    # Tag the event through the tagstore so EventTag rows exist to delete.
    key = "key"
    value = "value"
    tk = tagstore.create_tag_key(
        project_id=project.id, environment_id=self.environment.id, key=key)
    tv = tagstore.create_tag_value(
        project_id=project.id, environment_id=self.environment.id,
        key=key, value=value)
    tagstore.create_event_tags(
        event_id=event.id,
        group_id=group.id,
        project_id=project.id,
        environment_id=self.environment.id,
        tags=[(tk.key, tv.value)],
    )
    assert nodestore.get(node_id) is not None
    deletion = ScheduledDeletion.schedule(event, days=0)
    deletion.update(in_progress=True)
    with self.tasks():
        run_deletion(deletion.id)
    assert not Event.objects.filter(id=event.id).exists()
    assert not EventAttachment.objects.filter(
        event_id=event.event_id, project_id=project.id).exists()
    assert not UserReport.objects.filter(
        event_id=event.event_id, project_id=project.id).exists()
    assert not EventTag.objects.filter(event_id=event.id).exists()
    assert nodestore.get(node_id) is None
def _get_event_instance(self, project_id=None):
    """Construct an unsaved `Event` model from the normalized payload."""
    payload = self._data
    event_id = payload.get('event_id')
    # Convert the recorded epoch timestamp into an aware UTC datetime.
    occurred_at = datetime.fromtimestamp(payload.get('timestamp'))
    occurred_at = occurred_at.replace(tzinfo=timezone.utc)
    payload['node_id'] = Event.generate_node_id(project_id, event_id)
    return Event(
        project_id=project_id or self._project.id,
        event_id=event_id,
        data=EventDict(payload, skip_renormalization=True),
        time_spent=payload.get('time_spent'),
        datetime=occurred_at,
        platform=payload.get('platform'),
    )
def _get_event_instance(self, project_id=None):
    """Build an unsaved `Event` model from the normalized payload dict."""
    data = self._data
    event_id = data.get('event_id')
    # Epoch timestamp -> timezone-aware UTC datetime.
    naive = datetime.fromtimestamp(data.get('timestamp'))
    aware = naive.replace(tzinfo=timezone.utc)
    data['node_id'] = Event.generate_node_id(project_id, event_id)
    event_kwargs = {
        'project_id': project_id or self._project.id,
        'event_id': event_id,
        'data': EventDict(data, skip_renormalization=True),
        'time_spent': data.get('time_spent'),
        'datetime': aware,
        'platform': data.get('platform'),
    }
    return Event(**event_kwargs)
def chunk(self):
    """Delete one batch of the group's events and their associated data.

    Returns True if a batch was processed (more work may remain) and
    False when no events are left.
    """
    conditions = []
    if self.last_event is not None:
        # Keyset pagination: resume from the last processed event. The
        # nested list appears to be an OR group in the Snuba condition
        # syntax, i.e. timestamp <= last AND (timestamp < last OR
        # event_id < last_event_id) — TODO confirm against the
        # eventstore filter documentation.
        conditions.extend([
            ["timestamp", "<=", self.last_event.timestamp],
            [
                ["timestamp", "<", self.last_event.timestamp],
                ["event_id", "<", self.last_event.event_id],
            ],
        ])
    events = eventstore.get_events(
        filter=eventstore.Filter(
            conditions=conditions,
            project_ids=[self.project_id],
            group_ids=[self.group_id]),
        limit=self.DEFAULT_CHUNK_SIZE,
        referrer="deletions.group",
        orderby=["-timestamp", "-event_id"],
    )
    if not events:
        return False
    # Events are ordered newest-first, so the last one is the cursor
    # for the next chunk.
    self.last_event = events[-1]
    # Remove from nodestore
    node_ids = [
        Event.generate_node_id(self.project_id, event.event_id)
        for event in events
    ]
    nodestore.delete_multi(node_ids)
    # Remove EventAttachment and UserReport
    event_ids = [event.event_id for event in events]
    EventAttachment.objects.filter(
        event_id__in=event_ids, project_id=self.project_id).delete()
    UserReport.objects.filter(
        event_id__in=event_ids, project_id=self.project_id).delete()
    return True
def setUp(self):
    """Create three events across two groups plus the related rows that
    group deletion must cascade through."""
    super(DeleteGroupTest, self).setUp()
    self.event_id = "a" * 32
    self.event_id2 = "b" * 32
    self.event_id3 = "c" * 32
    self.project = self.create_project()
    # First event carries a tag and anchors group1.
    self.event = self.store_event(
        data={
            "event_id": self.event_id,
            "tags": {"foo": "bar"},
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group1"],
        },
        project_id=self.project.id,
    )
    # A second event in group1 and one in a separate group2.
    for extra_event_id, fingerprint in (
        (self.event_id2, "group1"),
        (self.event_id3, "group2"),
    ):
        self.store_event(
            data={
                "event_id": extra_event_id,
                "timestamp": iso_format(before_now(minutes=1)),
                "fingerprint": [fingerprint],
            },
            project_id=self.project.id,
        )
    group = self.event.group
    # Reports keyed by group id and by event id, plus an attachment.
    UserReport.objects.create(
        group_id=group.id, project_id=self.event.project_id, name="With group id"
    )
    UserReport.objects.create(
        event_id=self.event.event_id,
        project_id=self.event.project_id,
        name="With event id",
    )
    EventAttachment.objects.create(
        event_id=self.event.event_id,
        project_id=self.event.project_id,
        file=File.objects.create(name="hello.png", type="image/png"),
        name="hello.png",
    )
    GroupAssignee.objects.create(group=group, project=self.project, user=self.user)
    GroupHash.objects.create(project=self.project, group=group, hash=uuid4().hex)
    GroupMeta.objects.create(group=group, key="foo", value="bar")
    GroupRedirect.objects.create(group_id=group.id, previous_group_id=1)
    self.node_id = Event.generate_node_id(self.project.id, self.event_id)
    self.node_id2 = Event.generate_node_id(self.project.id, self.event_id2)
    self.node_id3 = Event.generate_node_id(self.project.id, self.event_id3)
def test_simple(self):
    """Group deletion in chunks removes only that group's events and related
    rows, leaving the other group's node data intact."""
    EventDataDeletionTask.DEFAULT_CHUNK_SIZE = 1  # test chunking logic
    event_id = "a" * 32
    event_id2 = "b" * 32
    event_id3 = "c" * 32
    project = self.create_project()
    node_id = Event.generate_node_id(project.id, event_id)
    node_id2 = Event.generate_node_id(project.id, event_id2)
    node_id3 = Event.generate_node_id(project.id, event_id3)
    # Two events in group1 (one tagged), one in group2.
    event = self.store_event(
        data={
            "event_id": event_id,
            "tags": {"foo": "bar"},
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group1"],
        },
        project_id=project.id,
    )
    self.store_event(
        data={
            "event_id": event_id2,
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group1"],
        },
        project_id=project.id,
    )
    self.store_event(
        data={
            "event_id": event_id3,
            "timestamp": iso_format(before_now(minutes=1)),
            "fingerprint": ["group2"],
        },
        project_id=project.id,
    )
    group = event.group
    group.update(status=GroupStatus.PENDING_DELETION)
    # NOTE(review): `project` is rebound to a NEW project here, so the
    # Group* rows below belong to a different project than the stored
    # events — confirm this is intentional and not an accidental shadow.
    project = self.create_project()
    UserReport.objects.create(
        group_id=group.id, project_id=event.project_id, name="With group id")
    UserReport.objects.create(
        event_id=event.event_id, project_id=event.project_id, name="With event id")
    EventAttachment.objects.create(
        event_id=event.event_id,
        project_id=event.project_id,
        file=File.objects.create(name="hello.png", type="image/png"),
        name="hello.png",
    )
    GroupAssignee.objects.create(group=group, project=project, user=self.user)
    GroupHash.objects.create(project=project, group=group, hash=uuid4().hex)
    GroupMeta.objects.create(group=group, key="foo", value="bar")
    GroupRedirect.objects.create(group_id=group.id, previous_group_id=1)
    deletion = ScheduledDeletion.schedule(group, days=0)
    deletion.update(in_progress=True)
    assert nodestore.get(node_id)
    assert nodestore.get(node_id2)
    assert nodestore.get(node_id3)
    with self.tasks():
        run_deletion(deletion.id)
    assert not Event.objects.filter(id=event.id).exists()
    assert not UserReport.objects.filter(group_id=group.id).exists()
    assert not UserReport.objects.filter(event_id=event.event_id).exists()
    assert not EventAttachment.objects.filter(
        event_id=event.event_id).exists()
    assert not GroupRedirect.objects.filter(group_id=group.id).exists()
    assert not GroupHash.objects.filter(group_id=group.id).exists()
    assert not Group.objects.filter(id=group.id).exists()
    assert not nodestore.get(node_id)
    assert not nodestore.get(node_id2)
    # group2's event must survive the deletion of group1.
    assert nodestore.get(node_id3), "Does not remove from second group"