def get_group_tag_values_for_users(self, event_users, limit=100):
    """
    Return GroupTagValues for the ``sentry:user`` tag across the given
    EventUsers' projects.

    While not specific to a group_id, this is currently only used in
    issues, so the Events dataset is used.
    """
    # Restrict the query to the projects the EventUsers belong to.
    filters = {"project_id": [eu.project_id for eu in event_users]}
    # Drop falsy tag values before building the IN condition.
    user_values = [eu.tag_value for eu in event_users]
    conditions = [["tags[sentry:user]", "IN", [v for v in user_values if v]]]
    aggregations = [
        ["count()", "", "times_seen"],
        ["min", SEEN_COLUMN, "first_seen"],
        ["max", SEEN_COLUMN, "last_seen"],
    ]
    result = snuba.query(
        dataset=Dataset.Events,
        groupby=["group_id", "user_id"],
        conditions=conditions,
        filter_keys=filters,
        aggregations=aggregations,
        orderby="-last_seen",
        limit=limit,
        referrer="tagstore.get_group_tag_values_for_users",
    )
    # Flatten {group_id: {user_value: agg_data}} into GroupTagValue objects.
    return [
        GroupTagValue(group_id=issue, key="sentry:user", value=name, **fix_tag_value_data(data))
        for issue, users in result.items()
        for name, data in users.items()
    ]
def get_group_tag_value_iter(
    self, project_id, group_id, environment_ids, key, callbacks=(), limit=1000, offset=0
):
    """
    Fetch one page of GroupTagValues for a single tag key on a group,
    optionally restricted by environment.

    Each callable in ``callbacks`` is invoked once with the full batch
    before it is returned.
    """
    filters = {
        "project_id": get_project_list(project_id),
        "tags_key": [key],
        "group_id": [group_id],
    }
    if environment_ids:
        filters["environment"] = environment_ids
    results = snuba.query(
        groupby=["tags_value"],
        filter_keys=filters,
        aggregations=[
            ["count()", "", "times_seen"],
            ["min", "timestamp", "first_seen"],
            ["max", "timestamp", "last_seen"],
        ],
        orderby="-first_seen",  # Closest thing to pre-existing `-id` order
        limit=limit,
        referrer="tagstore.get_group_tag_value_iter",
        offset=offset,
    )
    group_tag_values = []
    for value, data in results.items():
        group_tag_values.append(
            GroupTagValue(group_id=group_id, key=key, value=value, **fix_tag_value_data(data))
        )
    for cb in callbacks:
        cb(group_tag_values)
    return group_tag_values
def __get_tag_value(self, project_id, group_id, environment_id, key, value):
    """
    Look up a single tag value's aggregate stats.

    Returns a TagValue when ``group_id`` is None, otherwise a
    GroupTagValue. Raises the matching *NotFound exception when no
    events carry the value.
    """
    tag_column = f"tags[{key}]"
    filters = {"project_id": get_project_list(project_id)}
    if environment_id:
        filters["environment"] = [environment_id]
    if group_id is not None:
        filters["group_id"] = [group_id]
    data = snuba.query(
        dataset=Dataset.Events,
        conditions=[[tag_column, "=", value]],
        filter_keys=filters,
        aggregations=[
            ["count()", "", "times_seen"],
            ["min", SEEN_COLUMN, "first_seen"],
            ["max", SEEN_COLUMN, "last_seen"],
        ],
        referrer="tagstore.__get_tag_value",
    )
    if data["times_seen"] > 0:
        data.update({"key": key, "value": value})
        if group_id is None:
            return TagValue(**fix_tag_value_data(data))
        return GroupTagValue(group_id=group_id, **fix_tag_value_data(data))
    # No matching events: raise the not-found flavor matching the lookup kind.
    raise TagValueNotFound if group_id is None else GroupTagValueNotFound
def get_group_list_tag_value(self, project_ids, group_id_list, environment_ids, key, value):
    """
    Return ``{group_id: GroupTagValue}`` for a single key=value pair
    across several groups, optionally restricted by environment.
    """
    filters = {"project_id": project_ids, "group_id": group_id_list}
    if environment_ids:
        filters["environment"] = environment_ids
    result = snuba.query(
        dataset=Dataset.Events,
        groupby=["group_id"],
        conditions=[[f"tags[{key}]", "=", value], DEFAULT_TYPE_CONDITION],
        filter_keys=filters,
        aggregations=[
            ["count()", "", "times_seen"],
            ["min", SEEN_COLUMN, "first_seen"],
            ["max", SEEN_COLUMN, "last_seen"],
        ],
        referrer="tagstore.get_group_list_tag_value",
    )
    group_tag_values = {}
    for issue, data in result.items():
        group_tag_values[issue] = GroupTagValue(
            group_id=issue, key=key, value=value, **fix_tag_value_data(data)
        )
    return group_tag_values
def get_group_tag_values_for_users(self, event_users, limit=100):
    """
    Return GroupTagValues for the ``sentry:user`` tag across the given
    EventUsers' projects.

    :param event_users: objects exposing ``project_id`` and ``tag_value``.
    :param limit: maximum number of rows to fetch from snuba.
    """
    start, end = self.get_time_range()
    filters = {'project_id': [eu.project_id for eu in event_users]}
    # Use a list comprehension rather than ``filter(None, ...)``: on
    # Python 3 ``filter`` returns a lazy iterator, which cannot be
    # serialized into the snuba request payload.
    conditions = [[
        'tags[sentry:user]',
        'IN',
        [eu.tag_value for eu in event_users if eu.tag_value],
    ]]
    aggregations = [
        ['count()', '', 'times_seen'],
        ['min', SEEN_COLUMN, 'first_seen'],
        ['max', SEEN_COLUMN, 'last_seen'],
    ]
    result = snuba.query(start, end, ['issue', 'user_id'], conditions, filters,
                         aggregations, orderby='-last_seen', limit=limit,
                         referrer='tagstore.get_group_tag_values_for_users')

    values = []
    for issue, users in six.iteritems(result):
        for name, data in six.iteritems(users):
            values.append(
                GroupTagValue(group_id=issue, key='sentry:user', value=name,
                              **fix_tag_value_data(data)))
    return values
def get_group_list_tag_value(self, project_id, group_id_list, environment_id, key, value):
    """
    Return ``{group_id: GroupTagValue}`` for a single key=value pair
    across several groups in one project.
    """
    start, end = self.get_time_range()
    tag = u'tags[{}]'.format(key)
    filters = {
        'project_id': [project_id],
        'issue': group_id_list,
    }
    if environment_id:
        filters['environment'] = [environment_id]
    aggregations = [
        ['count()', '', 'times_seen'],
        ['min', SEEN_COLUMN, 'first_seen'],
        ['max', SEEN_COLUMN, 'last_seen'],
    ]
    result = snuba.query(start, end, ['issue'], [[tag, '=', value]], filters,
                         aggregations, referrer='tagstore.get_group_list_tag_value')

    group_tag_values = {}
    for issue, data in six.iteritems(result):
        group_tag_values[issue] = GroupTagValue(
            group_id=issue, key=key, value=value, **fix_tag_value_data(data)
        )
    return group_tag_values
def get_group_tag_values_for_users(self, event_users, limit=100):
    """
    Return GroupTagValues for the ``sentry:user`` tag across the given
    EventUsers' projects.

    :param event_users: objects exposing ``project_id`` and ``tag_value``.
    :param limit: maximum number of rows to fetch from snuba.
    """
    filters = {"project_id": [eu.project_id for eu in event_users]}
    # Use a list comprehension rather than ``filter(None, ...)``: on
    # Python 3 ``filter`` returns a lazy iterator, which cannot be
    # serialized into the snuba request payload.
    conditions = [[
        "tags[sentry:user]",
        "IN",
        [eu.tag_value for eu in event_users if eu.tag_value],
    ]]
    aggregations = [
        ["count()", "", "times_seen"],
        ["min", SEEN_COLUMN, "first_seen"],
        ["max", SEEN_COLUMN, "last_seen"],
    ]
    result = snuba.query(
        groupby=["group_id", "user_id"],
        conditions=conditions,
        filter_keys=filters,
        aggregations=aggregations,
        orderby="-last_seen",
        limit=limit,
        referrer="tagstore.get_group_tag_values_for_users",
    )
    values = []
    for issue, users in six.iteritems(result):
        for name, data in six.iteritems(users):
            values.append(
                GroupTagValue(group_id=issue, key="sentry:user", value=name,
                              **fix_tag_value_data(data)))
    return values
def __get_tag_value(self, project_id, group_id, environment_id, key, value):
    """
    Look up a single tag value's aggregate stats.

    Returns a TagValue when ``group_id`` is None, otherwise a
    GroupTagValue. Raises the matching *NotFound exception when no
    events carry the value.
    """
    start, end = self.get_time_range()
    tag = 'tags[{}]'.format(key)
    # NOTE: the environment filter is applied unconditionally here, even
    # when ``environment_id`` is None.
    filters = {
        'project_id': [project_id],
        'environment': [environment_id],
    }
    if group_id is not None:
        filters['issue'] = [group_id]
    aggregations = [
        ['count()', '', 'times_seen'],
        ['min', SEEN_COLUMN, 'first_seen'],
        ['max', SEEN_COLUMN, 'last_seen'],
    ]
    data = snuba.query(start, end, [], [[tag, '=', value]], filters,
                       aggregations, referrer='tagstore.__get_tag_value')
    if data['times_seen'] > 0:
        data.update({'key': key, 'value': value})
        if group_id is None:
            return TagValue(**fix_tag_value_data(data))
        return GroupTagValue(group_id=group_id, **fix_tag_value_data(data))
    # Nothing matched: raise the not-found flavor matching the lookup kind.
    raise TagValueNotFound if group_id is None else GroupTagValueNotFound
def test_get_group_tag_value_paginator(self):
    from sentry.tagstore.types import GroupTagValue

    # Helper: the expected GroupTagValue for a user seen once,
    # ``age_seconds`` before ``self.now``.
    def expected(value, age_seconds):
        seen = self.now - timedelta(seconds=age_seconds)
        return GroupTagValue(
            group_id=self.proj1group1.id,
            key='sentry:user',
            value=value,
            times_seen=1,
            first_seen=seen,
            last_seen=seen,
        )

    paginator = self.ts.get_group_tag_value_paginator(
        self.proj1.id,
        self.proj1group1.id,
        self.proj1env1.id,
        'sentry:user',
    )
    assert list(paginator.get_result(10)) == [
        expected('id:user1', 1),
        expected('id:user2', 2),
    ]
def test_with_user(self):
    user = self.create_user()
    seen_at = datetime(2018, 1, 1)
    tag_value = GroupTagValue(
        group_id=0,
        key="sentry:user",
        value="username:ted",
        times_seen=1,
        first_seen=seen_at,
        last_seen=seen_at,
    )
    serialized = serialize(tag_value, user)
    # The reserved "sentry:" prefix is stripped and the username extracted.
    assert serialized["key"] == "user"
    assert serialized["value"] == "username:ted"
    assert serialized["name"] == "ted"
def test_with_user(self):
    user = self.create_user()
    seen_at = datetime(2018, 1, 1)
    tag_value = GroupTagValue(
        group_id=0,
        key='sentry:user',
        value='username:ted',
        times_seen=1,
        first_seen=seen_at,
        last_seen=seen_at,
    )
    serialized = serialize(tag_value, user)
    # The reserved 'sentry:' prefix is stripped and the username extracted.
    assert serialized['key'] == 'user'
    assert serialized['value'] == 'username:ted'
    assert serialized['name'] == 'ted'
def test_get_group_tag_value_iter(self):
    from sentry.tagstore.types import GroupTagValue

    # Helper: the expected GroupTagValue for a user seen once,
    # ``age_seconds`` before ``self.now``.
    def expected(value, age_seconds):
        seen = self.now - timedelta(seconds=age_seconds)
        return GroupTagValue(
            group_id=self.proj1group1.id,
            key="sentry:user",
            value=value,
            times_seen=1,
            first_seen=seen,
            last_seen=seen,
        )

    actual = list(
        self.ts.get_group_tag_value_iter(
            self.proj1.id, self.proj1group1.id, self.proj1env1.id, "sentry:user"
        )
    )
    assert actual == [expected("id:user1", 1), expected("id:user2", 2)]
def get_group_tag_value_iter(self, project_id, group_id, environment_id, key, callbacks=()):
    """
    Fetch GroupTagValues for a single tag key on a group.

    Each callable in ``callbacks`` is invoked once with the full batch
    before it is returned.
    """
    start, end = self.get_time_range()
    filters = {
        'project_id': [project_id],
        'tags_key': [key],
        'issue': [group_id],
    }
    if environment_id:
        filters['environment'] = [environment_id]
    aggregations = [
        ['count()', '', 'times_seen'],
        ['min', 'timestamp', 'first_seen'],
        ['max', 'timestamp', 'last_seen'],
    ]
    results = snuba.query(
        start=start,
        end=end,
        groupby=['tags_value'],
        filter_keys=filters,
        aggregations=aggregations,
        orderby='-first_seen',  # Closest thing to pre-existing `-id` order
        # TODO: This means they can't actually iterate all GroupTagValues.
        limit=1000,
        referrer='tagstore.get_group_tag_value_iter',
    )
    group_tag_values = []
    for value, data in six.iteritems(results):
        group_tag_values.append(
            GroupTagValue(group_id=group_id, key=key, value=value, **fix_tag_value_data(data))
        )
    for cb in callbacks:
        cb(group_tag_values)
    return group_tag_values
def get_group_tag_values_for_users(self, event_users, limit=100):
    """
    Return GroupTagValues for the ``sentry:user`` tag across the given
    EventUsers' projects, matching on any of the user identity columns.
    """
    start, end = self.get_time_range()
    filters = {'project_id': [eu.project_id for eu in event_users]}
    # Collect candidate identity columns; any column with no values is
    # dropped so that empty IN lists never reach the query.
    candidate_columns = (
        ('user_id', [eu.ident for eu in event_users if eu.ident]),
        ('email', [eu.email for eu in event_users if eu.email]),
        ('username', [eu.username for eu in event_users if eu.username]),
        ('ip_address', [eu.ip_address for eu in event_users if eu.ip_address]),
    )
    # A nested condition list is interpreted as an OR of its members.
    or_conditions = [
        (column, 'IN', values) for column, values in candidate_columns if values
    ]
    conditions = [or_conditions]
    aggregations = [
        ['count()', '', 'times_seen'],
        ['min', SEEN_COLUMN, 'first_seen'],
        ['max', SEEN_COLUMN, 'last_seen'],
    ]
    result = snuba.query(start, end, ['issue', 'user_id'], conditions, filters,
                         aggregations, orderby='-last_seen', limit=limit,
                         referrer='tagstore.get_group_tag_values_for_users')

    values = []
    for issue, users in six.iteritems(result):
        for name, data in six.iteritems(users):
            values.append(
                GroupTagValue(group_id=issue, key='sentry:user', value=name,
                              **fix_tag_value_data(data)))
    return values
def get_group_tag_value_iter(self, project_id, group_id, environment_id, key, callbacks=()):
    """
    Fetch GroupTagValues for a single tag key on a group.

    Each callable in ``callbacks`` is invoked once with the full batch
    before it is returned.
    """
    filters = {
        "project_id": get_project_list(project_id),
        "tags_key": [key],
        "issue": [group_id],
    }
    if environment_id:
        filters["environment"] = [environment_id]
    aggregations = [
        ["count()", "", "times_seen"],
        ["min", "timestamp", "first_seen"],
        ["max", "timestamp", "last_seen"],
    ]
    results = snuba.query(
        groupby=["tags_value"],
        filter_keys=filters,
        aggregations=aggregations,
        orderby="-first_seen",  # Closest thing to pre-existing `-id` order
        # TODO: This means they can't actually iterate all GroupTagValues.
        limit=1000,
        referrer="tagstore.get_group_tag_value_iter",
    )
    group_tag_values = []
    for value, data in six.iteritems(results):
        group_tag_values.append(
            GroupTagValue(group_id=group_id, key=key, value=value, **fix_tag_value_data(data))
        )
    for cb in callbacks:
        cb(group_tag_values)
    return group_tag_values
def get(self, request, group, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key for an issue.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    lookup_key = tagstore.prefix_reserved_key(key)

    try:
        environment_id = self._get_environment_id_from_request(
            request, group.project.organization_id)
    except Environment.DoesNotExist:
        # If the environment doesn't exist then the tag can't possibly exist.
        raise ResourceDoesNotExist

    # Confirm the tag key exists before paginating its values.
    try:
        tagstore.get_tag_key(group.project_id, environment_id, lookup_key)
    except tagstore.TagKeyNotFound:
        raise ResourceDoesNotExist

    queryset = tagstore.get_group_tag_value_qs(
        group.project_id, group.id, environment_id, lookup_key)

    sort = request.GET.get('sort')
    if sort in ('date', 'age'):
        order_by = '-last_seen' if sort == 'date' else '-first_seen'
        paginator_cls = DateTimePaginator
    else:
        order_by = '-id'
        paginator_cls = Paginator

    serializer_cls = UserTagValueSerializer(group.project_id) if key == 'user' else None

    # XXX: This is a pretty big abstraction leak
    def as_group_tag_value(instance):
        return GroupTagValue(
            group_id=instance.group_id,
            key=instance.key,
            value=instance.value,
            times_seen=instance.times_seen,
            last_seen=instance.last_seen,
            first_seen=instance.first_seen,
        )

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by=order_by,
        paginator_cls=paginator_cls,
        on_results=lambda results: serialize(
            [as_group_tag_value(instance) for instance in results],
            request.user,
            serializer_cls,
        ),
    )
models.TagValue: lambda instance: TagValue( key=instance.key, value=instance.value, times_seen=instance.times_seen, first_seen=instance.first_seen, last_seen=instance.last_seen, ), models.GroupTagKey: lambda instance: GroupTagKey( group_id=instance.group_id, key=instance.key, values_seen=instance.values_seen, ), models.GroupTagValue: lambda instance: GroupTagValue( group_id=instance.group_id, key=instance.key, value=instance.value, times_seen=instance.times_seen, first_seen=instance.first_seen, last_seen=instance.last_seen, ), } class LegacyTagStorage(TagStorage): """\ The legacy tagstore backend ignores the ``environment_id`` (because it doesn't store this information in its models) and stores ``times_seen`` and ``values_seen`` in Postgres. """ def setup(self): self.setup_deletions()