def get_raw_data(self, offset=0):
    """Return an iterable of GroupTagValues for this export, starting at ``offset``."""
    query_kwargs = {
        "project_id": self.group.project_id,
        "group_id": self.group.id,
        "environment_id": self.environment_id,
        "key": self.lookup_key,
        "callbacks": self.callbacks,
        "offset": offset,
    }
    return tagstore.get_group_tag_value_iter(**query_kwargs)
def get(self, request, organization, project, group_id, key):
    """Stream all values of tag ``key`` on the requested group as a CSV download.

    Raises ``Http404`` when the group, the requested environment, or the
    tag key cannot be resolved.
    """
    try:
        # TODO(tkaemming): This should *actually* redirect, see similar
        # comment in ``GroupEndpoint.convert_args``.
        group, _ = get_group_with_redirect(
            group_id,
            queryset=Group.objects.filter(project=project),
        )
    except Group.DoesNotExist:
        raise Http404

    # Reserved keys are stored with a "sentry:" prefix. Use unicode
    # literals so non-ASCII tag keys format safely on py2, matching the
    # sibling implementation of this view.
    if tagstore.is_reserved_key(key):
        lookup_key = u'sentry:{0}'.format(key)
    else:
        lookup_key = key

    try:
        environment_id = self._get_environment_id_from_request(
            request, project.organization_id)
    except Environment.DoesNotExist:
        # if the environment doesn't exist then the tag can't possibly exist
        raise Http404

    # validate existance as it may be deleted
    try:
        tagstore.get_tag_key(project.id, environment_id, lookup_key)
    except tagstore.TagKeyNotFound:
        raise Http404

    # The "user" tag is enriched with EventUser details via a callback.
    if key == 'user':
        callbacks = [attach_eventuser(project.id)]
    else:
        callbacks = []

    gtv_iter = tagstore.get_group_tag_value_iter(
        group.project_id,
        group.id,
        environment_id,
        lookup_key,
        callbacks=callbacks)

    filename = u'{}-{}'.format(
        group.qualified_short_id or group.id,
        key,
    )

    return self.to_csv_response(gtv_iter, filename, key=key)
def get(self, request, organization, project, group_id, key):
    """Serve the values of one tag on one group as a CSV download."""
    # Resolve the group, following redirects for merged groups.
    try:
        # TODO(tkaemming): This should *actually* redirect, see similar
        # comment in ``GroupEndpoint.convert_args``.
        group, _ = get_group_with_redirect(
            group_id,
            queryset=Group.objects.filter(project=project),
        )
    except Group.DoesNotExist:
        raise Http404

    # Reserved keys are stored under a "sentry:" prefix.
    lookup_key = u'sentry:{0}'.format(key) if tagstore.is_reserved_key(key) else key

    try:
        environment_id = self._get_environment_id_from_request(request, project.organization_id)
    except Environment.DoesNotExist:
        # if the environment doesn't exist then the tag can't possibly exist
        raise Http404

    # validate existance as it may be deleted
    try:
        tagstore.get_tag_key(project.id, environment_id, lookup_key)
    except tagstore.TagKeyNotFound:
        raise Http404

    # The "user" tag gets EventUser enrichment; other tags need no callbacks.
    callbacks = [attach_eventuser(project.id)] if key == 'user' else []

    tag_values = tagstore.get_group_tag_value_iter(
        group.project_id, group.id, environment_id, lookup_key, callbacks=callbacks
    )

    filename = u'{}-{}'.format(group.qualified_short_id or group.id, key)
    return self.to_csv_response(tag_values, filename, key=key)
def process_issue_by_tag(data_export, file, limit=None):
    """
    Convert the tag query to a CSV, writing it to the provided file.
    Returns the suggested file name.
    (Adapted from 'src/sentry/web/frontend/group_tag_export.py')
    """
    # Resolve the project the export was requested against.
    try:
        payload = data_export.query_info
        project = Project.objects.get(id=payload["project_id"])
    except Project.DoesNotExist as error:
        metrics.incr("dataexport.error", instance=six.text_type(error))
        logger.error("dataexport.error: {}".format(six.text_type(error)))
        raise DataExportError("Requested project does not exist")

    # Resolve the issue, following redirects for merged groups.
    try:
        group, _ = get_group_with_redirect(
            payload["group_id"], queryset=Group.objects.filter(project=project)
        )
    except Group.DoesNotExist as error:
        metrics.incr("dataexport.error", instance=six.text_type(error))
        logger.error("dataexport.error: {}".format(six.text_type(error)))
        raise DataExportError("Requested issue does not exist")

    key = payload["key"]
    # Reserved keys are stored under a "sentry:" prefix.
    lookup_key = six.text_type("sentry:{}").format(key) if tagstore.is_reserved_key(key) else key

    def attach_eventuser(items):
        # Decorate each GroupTagValue with its matching EventUser record.
        users = EventUser.for_tags(group.project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)

    # The "user" tag gets extra identity columns plus the EventUser callback.
    if key == "user":
        callbacks = [attach_eventuser]
        fields = [
            "value",
            "id",
            "email",
            "username",
            "ip_address",
            "times_seen",
            "last_seen",
            "first_seen",
        ]
    else:
        callbacks = []
        fields = ["value", "times_seen", "last_seen", "first_seen"]

    # Example file name: ISSUE_BY_TAG-project10-user__721.csv
    file_details = six.text_type("{}-{}__{}").format(project.slug, key, data_export.id)
    file_name = get_file_name(ExportQueryType.ISSUE_BY_TAG_STR, file_details)

    # Page through every GroupTagValue, writing each page out as CSV rows.
    writer = create_writer(file, fields)
    page = 0
    with snuba_error_handler():
        while True:
            offset = SNUBA_MAX_RESULTS * page
            next_offset = SNUBA_MAX_RESULTS * (page + 1)
            tag_values = tagstore.get_group_tag_value_iter(
                project_id=group.project_id,
                group_id=group.id,
                environment_id=None,
                key=lookup_key,
                callbacks=callbacks,
                offset=offset,
            )
            if len(tag_values) == 0:
                break
            rows = [serialize_issue_by_tag(key, value) for value in tag_values]
            if limit and limit < next_offset:
                # The next page would pass the limit; write the remainder and stop.
                writer.writerows(rows[: limit % SNUBA_MAX_RESULTS])
                break
            writer.writerows(rows)
            page += 1
    return file_name
def process_issue_by_tag(data_export, file):
    """
    Convert the tag query to a CSV, writing it to the provided file.
    Returns the suggested file name.
    (Adapted from 'src/sentry/web/frontend/group_tag_export.py')
    """
    # Resolve the project, then the issue (following merge redirects).
    payload = data_export.query_info
    project = Project.objects.get(id=payload["project_id"])
    group, _ = get_group_with_redirect(
        payload["group_id"], queryset=Group.objects.filter(project=project)
    )

    key = payload["key"]
    # Reserved keys are stored under a "sentry:" prefix.
    lookup_key = u"sentry:{0}".format(key) if tagstore.is_reserved_key(key) else key

    def attach_eventuser(items):
        # Decorate each GroupTagValue with its matching EventUser record.
        users = EventUser.for_tags(group.project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)

    # The "user" tag gets extra identity columns plus the EventUser callback.
    if key == "user":
        callbacks = [attach_eventuser]
        fields = [
            "value",
            "id",
            "email",
            "username",
            "ip_address",
            "times_seen",
            "last_seen",
            "first_seen",
        ]
    else:
        callbacks = []
        fields = ["value", "times_seen", "last_seen", "first_seen"]

    # Example file name: ISSUE_BY_TAG-project10-user__721.csv
    file_details = u"{}-{}__{}".format(project.slug, key, data_export.id)
    file_name = get_file_name(ExportQueryType.ISSUE_BY_TAG_STR, file_details)

    # Page through every GroupTagValue, writing each page out as CSV rows.
    writer = create_writer(file, fields)
    page = 0
    while True:
        tag_values = tagstore.get_group_tag_value_iter(
            project_id=group.project_id,
            group_id=group.id,
            environment_id=None,
            key=lookup_key,
            callbacks=callbacks,
            offset=SNUBA_MAX_RESULTS * page,
        )
        rows = [serialize_issue_by_tag(key, value) for value in tag_values]
        if not rows:
            break
        writer.writerows(rows)
        page += 1
    return file_name