def test_event_with_no_body(self):
    """Deleting the nodestore payload must not break serialization; tags still come back."""
    # Remove the event from nodestore to simulate an event with no body.
    node_id = SnubaEvent.generate_node_id(self.proj1.id, self.event_id)
    nodestore.delete(node_id)
    assert nodestore.get(node_id) is None

    # Check that we can still serialize it
    event = SnubaEvent.get_event(self.proj1.id, self.event_id)
    serialized = serialize(event)
    assert event.data == {}

    # Check that the regular serializer still gives us back tags
    expected_tags = [
        {'_meta': None, 'key': key, 'value': value}
        for key, value in (
            ('baz', 'quux'),
            ('foo', 'bar'),
            ('release', 'release1'),
        )
    ]
    # The user tag additionally carries a search query.
    expected_tags.append({
        '_meta': None,
        'key': 'user',
        'query': 'user.id:user1',
        'value': 'id:user1'
    })
    assert serialized['tags'] == expected_tags
def test_event_with_no_body(self):
    """Serializing an event whose nodestore payload was removed still yields its tags."""
    # remove the event from nodestore to simulate an event with no body.
    node_id = SnubaEvent.generate_node_id(self.proj1.id, self.event_id)
    nodestore.delete(node_id)
    assert nodestore.get(node_id) is None

    # Check that we can still serialize it
    event = eventstore.get_event_by_id(
        self.proj1.id, self.event_id, additional_columns=eventstore.full_columns
    )
    serialized = serialize(event)
    assert event.data == {}

    # Check that the regular serializer still gives us back tags
    plain_tags = (
        ("baz", "quux"),
        ("environment", "prod"),
        ("foo", "bar"),
        ("level", "error"),
        ("release", "release1"),
    )
    expected = [{"_meta": None, "key": k, "value": v} for k, v in plain_tags]
    # The user tag carries an extra "query" field.
    expected.append(
        {"_meta": None, "key": "user", "query": 'user.id:"user1"', "value": "id:user1"}
    )
    assert serialized["tags"] == expected
def on_delete(self, instance, **kwargs):
    """Remove the nodestore payload associated with ``instance`` when the row is deleted."""
    node = getattr(instance, self.name)
    node_id = node.id
    if node_id:
        # Only delete when a node was actually persisted (a falsy id means
        # nothing was ever written to nodestore for this field).
        nodestore.delete(node_id)
def on_delete(self, instance, **kwargs):
    """Signal handler: drop this field's nodestore entry alongside the deleted instance."""
    stored = getattr(instance, self.name)
    if not stored.id:
        # No persisted node — nothing to clean up.
        return
    nodestore.delete(stored.id)
def backfill_eventstream(apps, schema_editor):
    """
    Inserts Postgres events into the eventstream if there are recent events in Postgres.

    This is for open source users migrating from 9.x who want to keep their events.
    If there are no recent events in Postgres, skip the backfill.
    """
    from sentry import eventstore, eventstream
    from sentry.utils.query import RangeQuerySetWrapper

    # Historical model states as of this migration, not the live models.
    Event = apps.get_model("sentry", "Event")
    Group = apps.get_model("sentry", "Group")
    Project = apps.get_model("sentry", "Project")

    # Kill switch to skip this migration
    # NOTE(review): any non-empty env value (even "0") is truthy here — presumably
    # intentional for a presence-based kill switch.
    skip_backfill = os.environ.get("SENTRY_SKIP_EVENTS_BACKFILL_FOR_10", False)
    # Use 90 day retention if the option has not been set or set to 0
    DEFAULT_RETENTION = 90
    retention_days = options.get("system.event-retention-days") or DEFAULT_RETENTION

    def get_events(last_days):
        # Events within the retention window that belong to a group.
        to_date = timezone.now()
        from_date = to_date - timedelta(days=last_days)
        return Event.objects.filter(
            datetime__gte=from_date, datetime__lte=to_date, group_id__isnull=False
        )

    def _attach_related(_events):
        # Batch-load projects and groups for a page of events to avoid
        # per-event queries, then attach them to each event.
        project_ids = set()
        group_ids = set()
        for event in _events:
            project_ids.add(event.project_id)
            group_ids.add(event.group_id)
        projects = {p.id: p for p in Project.objects.filter(id__in=project_ids)}
        groups = {g.id: g for g in Group.objects.filter(id__in=group_ids)}
        for event in _events:
            event.project = projects.get(event.project_id)
            event.group = groups.get(event.group_id)
            # When migrating old data from Sentry 9.0.0 to 9.1.2 to 10 in rapid
            # succession, the event timestamp may be missing. This adds it back
            if "timestamp" not in event.data.data:
                event.data.data["timestamp"] = to_timestamp(event.datetime)
        # Hydrate each event's node payload from nodestore in one pass.
        eventstore.bind_nodes(_events, "data")

    if skip_backfill:
        print("Skipping backfill.\n")  # noqa: B314
        return

    events = get_events(retention_days)
    count = events.count()

    if count == 0:
        print("Nothing to do, skipping migration.\n")  # noqa: B314
        return

    print("Events to process: {}\n".format(count))  # noqa: B314

    processed = 0
    # Iterate in pages of 100; _attach_related runs once per page.
    for e in RangeQuerySetWrapper(events, step=100, callbacks=(_attach_related,)):
        event = NewEvent(
            project_id=e.project_id, event_id=e.event_id, group_id=e.group_id, data=e.data.data
        )
        try:
            group = event.group
        except Group.DoesNotExist:
            group = None

        # Skip events with broken relations or an empty node payload —
        # they cannot be inserted into the eventstream.
        if event.project is None or group is None or len(event.data) == 0:
            print(  # noqa: B314
                "Skipped {} as group, project or node data information is invalid.\n".format(
                    event
                )
            )
            continue

        try:
            eventstream.insert(
                group=event.group,
                event=event,
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                primary_hash=event.get_primary_hash(),
                # Fall back to the event datetime when no "received" timestamp
                # was recorded on the payload.
                received_timestamp=event.data.get("received")
                or float(event.datetime.strftime("%s")),
                skip_consume=True,
            )

            # The node ID format was changed in Sentry 9.1.0
            # (https://github.com/getsentry/sentry/commit/f73a4039d16a5c4f88bde37f6464cac21deb50e1)
            # If we are migrating from older versions of Sentry (i.e. 9.0.0 and earlier)
            # we need to resave the node using the new node ID scheme and delete the old
            # node.
            old_node_id = e.data.id
            new_node_id = event.data.id
            if old_node_id != new_node_id:
                event.data.save()
                nodestore.delete(old_node_id)

            processed += 1
        except Exception as error:
            # Best-effort migration: log the failure and continue with the
            # remaining events rather than aborting the whole backfill.
            print(  # noqa: B314
                "An error occured while trying to migrate the following event: {}\n.----\n{}".format(
                    event, error
                )
            )

    # If nothing at all migrated, something is systemically wrong — fail loudly
    # and point the operator at the kill-switch env var.
    if processed == 0:
        raise Exception(
            "Cannot migrate any event. If this is okay, re-run migrations with SENTRY_SKIP_EVENTS_BACKFILL_FOR_10 environment variable set to skip this step."
        )

    print(  # noqa: B314
        "Event migration done. Migrated {} of {} events.\n".format(processed, count)
    )