def repair_tag_data(caches, project, events):
    """Backfill tagstore rows (group tag keys and values) for ``events``,
    scoped to each ``(group, environment)`` pair found in the event data."""
    for (group_id, env_name), tag_keys in collect_tag_data(events).items():
        environment = caches["Environment"](project.organization_id, env_name)
        # Common identifying kwargs shared by every tagstore call below.
        scope = {
            "project_id": project.id,
            "group_id": group_id,
            "environment_id": environment.id,
        }
        for tag_key, tag_values in tag_keys.items():
            tagstore.get_or_create_group_tag_key(key=tag_key, **scope)
            # XXX: `{first,last}_seen` columns don't totally replicate the
            # ingestion logic (but actually represent a more accurate value.)
            # See GH-5289 for more details.
            for tag_value, (times_seen, first_seen, last_seen) in tag_values.items():
                _, created = tagstore.get_or_create_group_tag_value(
                    key=tag_key,
                    value=tag_value,
                    defaults={
                        "first_seen": first_seen,
                        "last_seen": last_seen,
                        "times_seen": times_seen,
                    },
                    **scope
                )
                if not created:
                    # Row already existed: bump the counter and hand
                    # `first_seen` to the backend via `extra`.
                    tagstore.incr_group_tag_value_times_seen(
                        key=tag_key,
                        value=tag_value,
                        count=times_seen,
                        extra={"first_seen": first_seen},
                        **scope
                    )
def repair_tag_data(caches, project, events):
    """Backfill group tag keys and values in tagstore for ``events``."""
    for group_id, keys in collect_tag_data(events).items():
        for key, values in keys.items():
            tagstore.get_or_create_group_tag_key(
                project_id=project.id,
                group_id=group_id,
                key=key,
            )
            # XXX: `{first,last}_seen` columns don't totally replicate the
            # ingestion logic (but actually represent a more accurate value.)
            # See GH-5289 for more details.
            for value, stats in values.items():
                times_seen, first_seen, last_seen = stats
                defaults = {
                    'first_seen': first_seen,
                    'last_seen': last_seen,
                    'times_seen': times_seen,
                }
                row, was_created = tagstore.get_or_create_group_tag_value(
                    project_id=project.id,
                    group_id=group_id,
                    key=key,
                    value=value,
                    defaults=defaults,
                )
                if was_created:
                    continue
                # Pre-existing row: refresh first_seen and add to the counter.
                row.update(
                    first_seen=first_seen,
                    times_seen=F('times_seen') + times_seen,
                )
def merge_to(self, project):
    """Merge this project's groups, events and tag data into ``project``.

    ``project`` may be a ``Project`` instance or a bare primary key.
    Groups without a counterpart in the destination are re-pointed;
    groups with a counterpart have their events re-parented and their
    tag-value counts folded in.  This project is deleted at the end.
    """
    from sentry.models import (Group, Event)
    if not isinstance(project, Project):
        # Accept a primary key for convenience.
        project = Project.objects.get_from_cache(pk=project)
    for group in Group.objects.filter(project=self):
        try:
            # NOTE(review): this lookup filters on ``project`` only — it looks
            # like an identifying condition (e.g. the group's checksum or
            # fingerprint) is missing, so it will raise MultipleObjectsReturned
            # whenever the destination project has more than one group.
            # Confirm against the upstream implementation.
            other = Group.objects.get(project=project, )
        except Group.DoesNotExist:
            # No counterpart: move the group itself, then re-point its
            # tagstore rows at the new project.
            group.update(project=project)
            tagstore.update_project_for_group(group_id=group.id, old_project_id=self.id, new_project_id=project.id)
        else:
            # Counterpart exists: re-parent this group's events onto it...
            Event.objects.filter(
                group_id=group.id,
            ).update(group_id=other.id)
            # ...and fold the tag-value counts into rows under the new project.
            # NOTE(review): this creates rows with ``group_id=group.id`` (the
            # source group), not ``other.id`` — presumably intentional for
            # tagstore's bookkeeping, but verify; it looks inconsistent with
            # the event re-parenting above.
            for obj in tagstore.get_group_tag_values(group_id=group.id):
                obj2, created = tagstore.get_or_create_group_tag_value(
                    project_id=project.id,
                    group_id=group.id,
                    key=obj.key,
                    value=obj.value,
                    defaults={'times_seen': obj.times_seen})
                if not created:
                    obj2.update(times_seen=F('times_seen') + obj.times_seen)
    # Move the plain (non-group) tag values over, deleting the originals.
    for fv in tagstore.get_tag_values(self.id):
        tagstore.get_or_create_tag_value(project_id=project.id, key=fv.key, value=fv.value)
        fv.delete()
    self.delete()
def repair_tag_data(caches, project, events):
    """Recreate tagstore group tag keys/values for the supplied events."""
    tag_data = collect_tag_data(events)
    for group_id, keys in tag_data.items():
        for key, values in keys.items():
            tagstore.get_or_create_group_tag_key(
                project_id=project.id,
                group_id=group_id,
                key=key,
            )
            # XXX: `{first,last}_seen` columns don't totally replicate the
            # ingestion logic (but actually represent a more accurate value.)
            # See GH-5289 for more details.
            for value, (times_seen, first_seen, last_seen) in values.items():
                gtv, created = tagstore.get_or_create_group_tag_value(
                    project_id=project.id,
                    group_id=group_id,
                    key=key,
                    value=value,
                    defaults=dict(
                        first_seen=first_seen,
                        last_seen=last_seen,
                        times_seen=times_seen,
                    ),
                )
                if not created:
                    # Merge the collected stats into the pre-existing row.
                    gtv.update(
                        first_seen=first_seen,
                        times_seen=F('times_seen') + times_seen,
                    )
def _setup_tags_for_event(self, event):
    """Record ``event``'s tags in tagstore, both under the aggregate
    environment and under the event's own environment.

    Maintains ``self.environments`` as a name -> Environment cache and
    ensures a ``GroupEnvironment`` link exists for the event's group.
    """
    # Tags may be absent from the payload; normalize to a (possibly empty) dict.
    tags = dict(event.data.get('tags') or ())
    try:
        environment = self.environments[event.data['environment']]
    except KeyError:
        # Cache miss: create (or fetch) the Environment and memoize it.
        environment = self.environments[
            event.data['environment']] = Environment.get_or_create(
            event.project,
            event.data['environment'],
        )
    GroupEnvironment.objects.get_or_create(
        environment_id=environment.id,
        group_id=event.group_id,
    )
    for key, value in tags.items():
        # Each tag is written twice: once against the aggregate (cross-
        # environment) bucket and once against the concrete environment.
        for environment_id in [AGGREGATE_ENVIRONMENT_ID, environment.id]:
            tag_value, created = tagstore.get_or_create_group_tag_value(
                project_id=event.project_id,
                group_id=event.group_id,
                environment_id=environment_id,
                key=key,
                value=value,
            )
            if created:
                # XXX: Hack for tagstore compat
                tag_value.update(
                    times_seen=1,
                    first_seen=event.datetime,
                    last_seen=event.datetime,
                )
            else:
                updates = {
                    'times_seen': tag_value.times_seen + 1,
                }
                # Widen the seen-window only when this event falls outside it.
                if event.datetime < tag_value.first_seen:
                    updates['first_seen'] = event.datetime
                if event.datetime > tag_value.last_seen:
                    updates['last_seen'] = event.datetime
                if updates:
                    tag_value.update(**updates)
            # NOTE(review): this call sits in the innermost loop, so it runs
            # once per (tag, environment) while always passing the *full*
            # tag list — presumably create_event_tags is idempotent; confirm,
            # otherwise it should be hoisted out of the key/value loop.
            tagstore.create_event_tags(
                project_id=event.project_id,
                group_id=event.group_id,
                environment_id=environment_id,
                event_id=event.id,
                tags=tags.items(),
                date_added=event.datetime,
            )
def _setup_tags_for_event(self, event):
    """Record ``event``'s tags in tagstore, both under the aggregate
    environment and under the event's own environment.

    Maintains ``self.environments`` as a name -> Environment cache and
    ensures a ``GroupEnvironment`` link exists for the event's group.
    """
    # NOTE(review): raises KeyError when the payload has no 'tags' key; a
    # sibling implementation in this file guards with
    # ``event.data.get('tags') or ()`` — confirm which behavior is wanted.
    tags = dict(event.data['tags'])
    # NOTE(review): the environment name is read from the *tags* dict here,
    # whereas the sibling implementation reads ``event.data['environment']``
    # — this raises KeyError for events without an 'environment' tag; verify.
    try:
        environment = self.environments[tags['environment']]
    except KeyError:
        # Cache miss: create (or fetch) the Environment and memoize it.
        environment = self.environments[tags['environment']] = Environment.get_or_create(
            event.project,
            tags['environment'],
        )
    GroupEnvironment.objects.get_or_create(
        environment_id=environment.id,
        group_id=event.group_id,
    )
    for key, value in tags.items():
        # Each tag is written twice: once against the aggregate (cross-
        # environment) bucket and once against the concrete environment.
        for environment_id in [AGGREGATE_ENVIRONMENT_ID, environment.id]:
            tag_value, created = tagstore.get_or_create_group_tag_value(
                project_id=event.project_id,
                group_id=event.group_id,
                environment_id=environment_id,
                key=key,
                value=value,
            )
            if created:
                # XXX: Hack for tagstore compat
                tag_value.update(
                    times_seen=1,
                    first_seen=event.datetime,
                    last_seen=event.datetime,
                )
            else:
                updates = {
                    'times_seen': tag_value.times_seen + 1,
                }
                # Widen the seen-window only when this event falls outside it.
                if event.datetime < tag_value.first_seen:
                    updates['first_seen'] = event.datetime
                if event.datetime > tag_value.last_seen:
                    updates['last_seen'] = event.datetime
                if updates:
                    tag_value.update(**updates)
            # NOTE(review): runs once per (tag, environment) with the *full*
            # tag list — presumably create_event_tags is idempotent; confirm,
            # otherwise it should be hoisted out of the key/value loop.
            tagstore.create_event_tags(
                project_id=event.project_id,
                group_id=event.group_id,
                environment_id=environment_id,
                event_id=event.id,
                tags=tags.items(),
                date_added=event.datetime,
            )
def repair_tag_data(caches, project, events):
    """Recreate per-environment tagstore rows (group tag keys and values)
    from the supplied events."""
    collected = collect_tag_data(events)
    for (group_id, env_name), keys in collected.items():
        env = caches['Environment'](
            project.organization_id,
            env_name,
        )
        for key, values in keys.items():
            tagstore.get_or_create_group_tag_key(
                project_id=project.id,
                group_id=group_id,
                environment_id=env.id,
                key=key,
            )
            # XXX: `{first,last}_seen` columns don't totally replicate the
            # ingestion logic (but actually represent a more accurate value.)
            # See GH-5289 for more details.
            for value, stats in values.items():
                times_seen, first_seen, last_seen = stats
                _, created = tagstore.get_or_create_group_tag_value(
                    project_id=project.id,
                    group_id=group_id,
                    environment_id=env.id,
                    key=key,
                    value=value,
                    defaults={
                        'first_seen': first_seen,
                        'last_seen': last_seen,
                        'times_seen': times_seen,
                    },
                )
                if created:
                    continue
                # Existing row: increment its counter in place.
                tagstore.incr_group_tag_value_times_seen(
                    project_id=project.id,
                    group_id=group_id,
                    environment_id=env.id,
                    key=key,
                    value=value,
                    count=times_seen,
                    extra={'first_seen': first_seen}
                )
def merge_to(self, project):
    """Merge this project's groups, events and tag data into ``project``,
    then delete this project.  ``project`` may be an instance or a pk."""
    from sentry.models import Group, Event

    if not isinstance(project, Project):
        project = Project.objects.get_from_cache(pk=project)

    for group in Group.objects.filter(project=self):
        try:
            destination = Group.objects.get(project=project)
        except Group.DoesNotExist:
            # No counterpart in the destination: move the group itself and
            # re-point its tagstore rows at the new project.
            group.update(project=project)
            tagstore.update_project_for_group(
                group_id=group.id,
                old_project_id=self.id,
                new_project_id=project.id,
            )
            continue

        # A counterpart exists: re-parent this group's events onto it...
        Event.objects.filter(group_id=group.id).update(group_id=destination.id)

        # ...and fold the tag-value counts into rows under the new project.
        # NOTE(review): rows are created with the *source* group's id, not
        # ``destination.id`` — looks inconsistent with the event move; verify.
        for gtv in tagstore.get_group_tag_values(group_id=group.id):
            merged, created = tagstore.get_or_create_group_tag_value(
                project_id=project.id,
                group_id=group.id,
                key=gtv.key,
                value=gtv.value,
                defaults={'times_seen': gtv.times_seen},
            )
            if not created:
                merged.update(times_seen=F('times_seen') + gtv.times_seen)

    # Move plain (non-group) tag values over, deleting the originals.
    for fv in tagstore.get_tag_values(self.id):
        tagstore.get_or_create_tag_value(
            project_id=project.id,
            key=fv.key,
            value=fv.value,
        )
        fv.delete()

    self.delete()