def test_admin_access(self):
    user = self.create_user(username='******')
    organization = self.create_organization()
    self.create_member(user=user, organization=organization, role='admin')
    team = self.create_team(organization=organization)
    project = self.create_project(teams=[team])

    result = serialize(project, user)
    result.pop('dateCreated')

    assert result['hasAccess'] is True
    assert result['isMember'] is False

    organization.flags.allow_joinleave = False
    organization.save()
    result = serialize(project, user)  # after changing to allow_joinleave=False
    assert result['hasAccess'] is False
    assert result['isMember'] is False

    self.create_team_membership(user=user, team=team)
    result = serialize(project, user)  # after giving them access to team
    assert result['hasAccess'] is True
    assert result['isMember'] is True
def test_environment(self):
    group = self.group

    environment = Environment.get_or_create(group.project, 'production')

    from sentry.api.serializers.models.group import tsdb

    with mock.patch(
            'sentry.api.serializers.models.group.tsdb.get_range',
            side_effect=tsdb.get_range) as get_range:
        serialize(
            [group],
            serializer=StreamGroupSerializer(
                environment_func=lambda: environment,
                stats_period='14d',
            ),
        )
        assert get_range.call_count == 1
        for args, kwargs in get_range.call_args_list:
            assert kwargs['environment_id'] == environment.id

    def get_invalid_environment():
        raise Environment.DoesNotExist()

    with mock.patch(
            'sentry.api.serializers.models.group.tsdb.make_series',
            side_effect=tsdb.make_series) as make_series:
        serialize(
            [group],
            serializer=StreamGroupSerializer(
                environment_func=get_invalid_environment,
                stats_period='14d',
            )
        )
        assert make_series.call_count == 1
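# The spy idiom the test above relies on, in isolation: patching with
# side_effect pointed at the real function records calls without changing
# behavior. Self-contained sketch using the stdlib (`import mock` on this
# py2-era codebase; `from unittest import mock` on modern Python):
import math

import mock

with mock.patch('math.sqrt', side_effect=math.sqrt) as spy:
    assert math.sqrt(4.0) == 2.0  # real result is preserved...
    assert spy.call_count == 1    # ...while the call is recorded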
def get(self, request, project):
    """
    List a project's processing issues.
    """
    num_issues = ProcessingIssue.objects.filter(
        project=project
    ).count()

    last_seen = ProcessingIssue.objects.filter(
        project=project
    ).order_by('-datetime').first()

    resolveable_issues, has_more = ProcessingIssue.objects \
        .find_resolved(project_id=project.id)

    reprocessing_issues = ReprocessingReport.objects \
        .filter(project_id=project.id).count()

    data = {
        'hasIssues': num_issues > 0,
        'numIssues': num_issues,
        'lastSeen': last_seen and serialize(last_seen.datetime) or None,
        'resolveableIssues': len(resolveable_issues),
        'hasMoreResolveableIssues': has_more,
        'issuesProcessing': reprocessing_issues,
    }

    if request.GET.get('detailed') == '1':
        q = ProcessingIssue.objects.with_num_events().filter(
            project=project
        ).order_by('type')
        data['issues'] = [serialize(x, request.user) for x in q]

    return Response(serialize(data, request.user))
def test_simple(self):
    projects_ids = [self.project.id, self.project_2.id]
    org_members = list(self.org.member_set.filter(
        user__in=[
            self.owner_user,
            self.user_2,
        ],
    ).order_by('user__email'))
    result = serialize(
        org_members,
        self.user_2,
        OrganizationMemberWithProjectsSerializer(project_ids=projects_ids),
    )
    expected_projects = [
        [self.project.slug, self.project_2.slug],
        [self.project.slug],
    ]
    expected_projects[0].sort()
    assert [r['projects'] for r in result] == expected_projects

    projects_ids = [self.project_2.id]
    result = serialize(
        org_members,
        self.user_2,
        OrganizationMemberWithProjectsSerializer(project_ids=projects_ids),
    )
    expected_projects = [
        [self.project_2.slug],
        [],
    ]
    assert [r['projects'] for r in result] == expected_projects
def test_simple(self):
    user = self.create_user(username='******')
    organization = self.create_organization(owner=user)
    team = self.create_team(organization=organization)
    project = self.create_project(teams=[team], organization=organization, name='foo')
    project2 = self.create_project(teams=[team], organization=organization, name='bar')
    result = serialize(team, user, TeamWithProjectsSerializer())
    result.pop('dateCreated')  # don't compare dateCreated because of mysql
    serialized_projects = serialize([project2, project], user)
    for p in serialized_projects:
        p.pop('dateCreated')
    for p in result['projects']:
        p.pop('dateCreated')
    assert result == {
        'slug': team.slug,
        'name': team.name,
        'hasAccess': True,
        'isPending': False,
        'isMember': False,
        'id': six.text_type(team.id),
        'projects': serialized_projects,
        'avatar': {
            'avatarType': 'letter_avatar',
            'avatarUuid': None,
        },
    }
def get(self, request, project):
    """
    List a Project's DSym Files
    ```````````````````````````

    Retrieve a list of dsym files for a given project.

    :pparam string organization_slug: the slug of the organization the
                                      project belongs to.
    :pparam string project_slug: the slug of the project to list the
                                 dsym files of.
    :auth: required
    """
    apps = DSymApp.objects.filter(project=project)
    dsym_files = VersionDSymFile.objects.filter(
        dsym_app__in=apps
    ).select_related('dsym_file').order_by('-build', 'version')

    file_list = ProjectDSymFile.objects.filter(
        project=project,
        versiondsymfile__isnull=True,
    ).select_related('file')[:100]

    download_requested = request.GET.get('download_id') is not None
    if download_requested and request.access.has_scope('project:write'):
        return self.download(request.GET.get('download_id'), project)

    return Response(
        {
            'apps': serialize(list(apps)),
            'debugSymbols': serialize(list(dsym_files)),
            'unreferencedDebugSymbols': serialize(list(file_list)),
        }
    )
def get_payload_v0(event):
    from sentry.api.serializers import serialize

    group = event.group
    project = group.project

    project_url_base = absolute_uri(u'/{}/{}'.format(
        project.organization.slug,
        project.slug,
    ))

    group_context = serialize(group)
    group_context['url'] = u'{}/issues/{}/'.format(
        project_url_base,
        group.id,
    )

    event_context = serialize(event)
    event_context['url'] = u'{}/issues/{}/events/{}/'.format(
        project_url_base,
        group.id,
        event.id,
    )

    data = {
        'project': {
            'slug': project.slug,
            'name': project.name,
        },
        'group': group_context,
        'event': event_context,
    }
    return data
def get(self, request, organization):
    """
    List an organization's projects

    Return a list of projects bound to an organization.

        {method} {path}

    """
    if request.auth and hasattr(request.auth, 'project'):
        team_list = [request.auth.project.team]
        project_list = [request.auth.project]
    else:
        team_list = list(request.access.teams)
        project_list = list(Project.objects.filter(
            team__in=team_list,
        ).order_by('name'))

    team_map = dict(
        (t.id, c)
        for (t, c) in zip(team_list, serialize(team_list, request.user))
    )

    context = []
    for project, pdata in zip(project_list, serialize(project_list, request.user)):
        pdata['team'] = team_map[project.team_id]
        context.append(pdata)

    return Response(context)
def get(self, request, project):
    """
    Retrieve a Project
    ``````````````````

    Return details on an individual project.

    :pparam string organization_slug: the slug of the organization the
                                      project belongs to.
    :pparam string project_slug: the slug of the project to retrieve.
    :auth: required
    """
    active_plugins = [
        {
            'name': plugin.get_title(),
            'id': plugin.slug,
        }
        for plugin in plugins.configurable_for_project(project, version=None)
        if plugin.is_enabled(project) and plugin.has_project_conf()
    ]

    data = serialize(project, request.user)
    data['options'] = {
        'sentry:origins': '\n'.join(project.get_option('sentry:origins', ['*']) or []),
        'sentry:resolve_age': int(project.get_option('sentry:resolve_age', 0)),
        'sentry:scrub_data': bool(project.get_option('sentry:scrub_data', True)),
        'sentry:sensitive_fields': project.get_option('sentry:sensitive_fields', []),
    }
    data['activePlugins'] = active_plugins
    data['team'] = serialize(project.team, request.user)
    data['organization'] = serialize(project.organization, request.user)

    return Response(data)
def get(self, request, group, environment):
    # XXX(dcramer): we have no great way to pass the empty env
    if environment == 'none':
        environment = ''

    first_release = GroupRelease.objects.filter(
        group_id=group.id,
        environment=environment,
    ).order_by('first_seen').first()

    last_release = GroupRelease.objects.filter(
        group_id=group.id,
        environment=environment,
    ).order_by('-first_seen').first()

    stats_args = self._parse_args(request)

    context = {
        'environment': {
            'name': environment,
        },
        'firstRelease': serialize(first_release, request.user),
        'lastRelease': serialize(
            last_release,
            request.user,
            GroupReleaseWithStatsSerializer(
                since=stats_args['start'],
                until=stats_args['end'],
            )
        ),
    }
    return Response(context)
def test_seen_stats(self):
    group = self.create_group(first_seen=self.week_ago, times_seen=5)

    # should use group columns when no environments arg passed
    result = serialize(group, serializer=GroupSerializerSnuba())
    assert result['count'] == '5'
    assert result['lastSeen'] == group.last_seen
    assert result['firstSeen'] == group.first_seen

    environment = self.create_environment(project=group.project)
    environment2 = self.create_environment(project=group.project)

    self.create_event(
        'a' * 32, group=group, datetime=self.day_ago,
        tags={'environment': environment.name}
    )
    self.create_event(
        'b' * 32, group=group, datetime=self.min_ago,
        tags={'environment': environment.name}
    )
    self.create_event(
        'c' * 32, group=group, datetime=self.min_ago,
        tags={'environment': environment2.name}
    )

    result = serialize(
        group,
        serializer=GroupSerializerSnuba(
            environment_ids=[environment.id, environment2.id])
    )
    assert result['count'] == '3'
    # result is rounded down to nearest second
    assert result['lastSeen'] == self.min_ago - timedelta(microseconds=self.min_ago.microsecond)
    assert result['firstSeen'] == self.day_ago - \
        timedelta(microseconds=self.day_ago.microsecond)
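# The second-truncation the assertions above rely on, shown directly
# (illustrative timestamp):
from datetime import datetime, timedelta

ts = datetime(2018, 1, 1, 12, 0, 0, 456789)
assert ts - timedelta(microseconds=ts.microsecond) == datetime(2018, 1, 1, 12, 0, 0)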
def get_attrs(self, item_list, user):
    # TODO(dcramer): assert on relations
    users = {
        d['id']: d
        for d in serialize(set(i.user for i in item_list if i.user_id), user)
    }

    commit_ids = {
        i.data['commit']
        for i in item_list if i.type == Activity.SET_RESOLVED_IN_COMMIT
    }
    if commit_ids:
        commit_list = list(Commit.objects.filter(id__in=commit_ids))
        commits_by_id = {
            c.id: d for c, d in zip(commit_list, serialize(commit_list, user))
        }
        commits = {
            i: commits_by_id.get(i.data['commit'])
            for i in item_list if i.type == Activity.SET_RESOLVED_IN_COMMIT
        }
    else:
        commits = {}

    return {
        item: {
            'user': users[six.text_type(item.user_id)] if item.user_id else None,
            'commit': commits.get(item),
        } for item in item_list
    }
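# The batching idiom get_attrs uses, in miniature: gather foreign keys from
# the item list, resolve them in one bulk lookup, then map results back onto
# the items. fetch_many is a hypothetical stand-in for a single bulk query;
# items here are plain dicts, not Activity rows.
def build_commit_map(items, fetch_many):
    commit_ids = {i['commit'] for i in items if i.get('commit')}
    by_id = {row['id']: row for row in fetch_many(commit_ids)}  # one query, not N
    return {i['id']: by_id.get(i.get('commit')) for i in items}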
def get(self, request, organization):
    """
    Retrieve an organization

    Return details on an individual organization including various details
    such as membership access, features, and teams.

        {method} {path}

    """
    team_list = list(Team.objects.filter(
        organization=organization,
        status=TeamStatus.VISIBLE,
    ))

    feature_list = []
    if features.has('organizations:sso', organization, actor=request.user):
        feature_list.append('sso')
    if getattr(organization.flags, 'allow_joinleave'):
        feature_list.append('open-membership')

    context = serialize(organization, request.user)
    context['access'] = access.from_user(request.user, organization).scopes
    context['features'] = feature_list
    context['teams'] = serialize(
        team_list, request.user, TeamWithProjectsSerializer())

    return Response(context)
def test_environment(self):
    group = self.group

    environment = Environment.get_or_create(group.project, 'production')

    with mock.patch(
            'sentry.api.serializers.models.group.snuba_tsdb.get_range',
            side_effect=snuba_tsdb.get_range) as get_range:
        serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[environment.id],
                stats_period='14d',
            ),
        )
        assert get_range.call_count == 1
        for args, kwargs in get_range.call_args_list:
            assert kwargs['environment_ids'] == [environment.id]

    with mock.patch(
            'sentry.api.serializers.models.group.snuba_tsdb.get_range',
            side_effect=snuba_tsdb.get_range) as get_range:
        serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=None,
                stats_period='14d',
            )
        )
        assert get_range.call_count == 1
        for args, kwargs in get_range.call_args_list:
            assert kwargs['environment_ids'] is None
def test_simple(self):
    user = self.create_user()
    project = self.create_project()
    release = Release.objects.create(
        project=project,
        version=uuid4().hex,
        new_groups=1,
    )
    TagValue.objects.create(
        project=release.project,
        key='sentry:release',
        value=release.version,
        first_seen=timezone.now(),
        last_seen=timezone.now(),
        times_seen=5,
    )
    result = serialize(release, user)
    assert result['version'] == release.version
    assert result['shortVersion'] == release.version
    assert result['newGroups'] == 1
    assert result['firstEvent']
    assert result['lastEvent']

    # Make sure a sha1 value gets truncated
    release.version = '0' * 40
    result = serialize(release, user)
    assert result['shortVersion'] == '0' * 12
def get(self, request, group_id):
    """
    Retrieve an aggregate

    Return details on an individual aggregate.

        {method} {path}

    """
    group = Group.objects.get(
        id=group_id,
    )

    assert_perm(group, request.user, request.auth)

    data = serialize(group, request.user)

    # TODO: these probably should be another endpoint
    activity = self._get_activity(request, group, num=7)
    seen_by = self._get_seen_by(request, group)

    data.update({
        'activity': serialize(activity, request.user),
        'seenBy': serialize(seen_by, request.user),
    })

    return Response(data)
def get(self, request, share_id):
    """
    Retrieve an aggregate

    Return details on an individual aggregate specified by its shared ID.

        {method} {path}

    Note: This is not the equivalent of what you'd receive with the standard
    group details endpoint. Data is more restrictive and designed
    specifically for sharing.

    """
    try:
        group = Group.from_share_id(share_id)
    except Group.DoesNotExist:
        raise ResourceDoesNotExist

    if group.organization.flags.disable_shared_issues:
        raise ResourceDoesNotExist

    event = group.get_latest_event()

    context = serialize(group, request.user, SharedGroupSerializer())
    context['latestEvent'] = serialize(event, request.user, SharedEventSerializer())
    # TODO(dcramer): use specific serializer for public group and embed
    # event details as part of api response
    return Response(context)
def get(self, request, organization):
    """
    List an organization's projects

    Return a list of projects bound to an organization.

        {method} {path}

    """
    team_list = Team.objects.get_for_user(
        organization=organization,
        user=request.user,
    )

    project_list = []
    for team in team_list:
        project_list.extend(Project.objects.get_for_user(
            team=team,
            user=request.user,
        ))
    project_list.sort(key=lambda x: x.name)

    team_map = dict(
        (t.id, c)
        for (t, c) in zip(team_list, serialize(team_list, request.user))
    )

    context = []
    for project, pdata in zip(project_list, serialize(project_list, request.user)):
        pdata['team'] = team_map[project.team_id]
        context.append(pdata)

    return Response(context)
def get(self, request, wizard_hash=None):
    """
    This tries to retrieve and return the cache content if possible;
    otherwise it creates a new cache entry.
    """
    if wizard_hash is not None:
        key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, wizard_hash)
        wizard_data = default_cache.get(key)

        if wizard_data is None:
            return Response(status=404)
        elif wizard_data == 'empty':
            # when we just created a clean cache
            return Response(status=400)

        return Response(serialize(wizard_data))
    else:
        # This creates a new available hash url for the project wizard
        rate_limited = ratelimits.is_limited(
            key='rl:setup-wizard:ip:%s' % request.META['REMOTE_ADDR'],
            limit=10,
        )
        if rate_limited:
            logger.info('setup-wizard.rate-limit')
            return Response(
                {
                    'Too many wizard requests',
                },
                status=403
            )
        wizard_hash = get_random_string(
            64, allowed_chars='abcdefghijklmnopqrstuvwxyz0123456789')

        key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, wizard_hash)
        default_cache.set(key, 'empty', SETUP_WIZARD_CACHE_TIMEOUT)
        return Response(serialize({'hash': wizard_hash}))
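# A minimal fixed-window limiter in the spirit of ratelimits.is_limited
# above (sketch only; Sentry's actual implementation, storage backend, and
# window semantics may differ):
import time

_hits = {}

def is_limited_sketch(key, limit, window=60):
    # count hits per (key, window) bucket; over the limit means rejected
    bucket = (key, int(time.time() // window))
    _hits[bucket] = _hits.get(bucket, 0) + 1
    return _hits[bucket] > limit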
def serialize(self, obj, attrs, user):
    return {
        'id': six.text_type(obj.id),
        'member': serialize(obj.member),
        'team': serialize(obj.team),
    }
def test_simple(self):
    user = self.create_user(username='******')
    organization = self.create_organization(owner=user)
    team = self.create_team(organization=organization)
    project = self.create_project(teams=[team], organization=organization, name='foo')
    project2 = self.create_project(teams=[team], organization=organization, name='bar')
    result = serialize(team, user, TeamWithProjectsSerializer())
    serialized_projects = serialize([project2, project], user)

    assert result == {
        'slug': team.slug,
        'name': team.name,
        'hasAccess': True,
        'isPending': False,
        'isMember': False,
        'id': six.text_type(team.id),
        'projects': serialized_projects,
        'avatar': {
            'avatarType': 'letter_avatar',
            'avatarUuid': None,
        },
        'memberCount': 0,
        'dateCreated': team.date_added,
    }
def get(self, request, organization_slug):
    organization = Organization.objects.get_from_cache(
        slug=organization_slug,
    )

    assert_perm(organization, request.user, request.auth)

    team_list = Team.objects.get_for_user(
        organization=organization,
        user=request.user,
    )

    project_list = []
    for team in team_list:
        project_list.extend(Project.objects.get_for_user(
            team=team,
            user=request.user,
        ))
    project_list.sort(key=lambda x: x.name)

    team_map = dict(
        (t.id, c)
        for (t, c) in zip(team_list, serialize(team_list, request.user))
    )

    context = []
    for project, pdata in zip(project_list, serialize(project_list, request.user)):
        pdata['team'] = team_map[project.team_id]
        context.append(pdata)

    return Response(context)
def react_plugin_config(plugin, project, request):
    response = client.get(
        '/projects/{}/{}/plugins/{}/'.format(
            project.organization.slug,
            project.slug,
            plugin.slug,
        ),
        request=request,
    )
    return mark_safe("""
    <div id="ref-plugin-config"></div>
    <script>
    $(function(){
        ReactDOM.render(React.createFactory(Sentry.PluginConfig)({
            project: %s,
            organization: %s,
            data: %s
        }), document.getElementById('ref-plugin-config'));
    });
    </script>
    """ % (
        json.dumps_htmlsafe(serialize(project, request.user)),
        json.dumps_htmlsafe(serialize(project.organization, request.user)),
        json.dumps_htmlsafe(response.data),
    ))
def get_attrs(self, item_list, user):
    # TODO(dcramer): assert on relations
    users = {
        d['id']: d
        for d in serialize(set(i.user for i in item_list if i.user_id), user)
    }

    commit_ids = {
        i.data['commit']
        for i in item_list if i.type == Activity.SET_RESOLVED_IN_COMMIT
    }
    if commit_ids:
        commit_list = list(Commit.objects.filter(id__in=commit_ids))
        commits_by_id = {
            c.id: d for c, d in zip(commit_list, serialize(commit_list, user))
        }
        commits = {
            i: commits_by_id.get(i.data['commit'])
            for i in item_list if i.type == Activity.SET_RESOLVED_IN_COMMIT
        }
    else:
        commits = {}

    pull_request_ids = {
        i.data['pull_request']
        for i in item_list if i.type == Activity.SET_RESOLVED_IN_PULL_REQUEST
    }
    if pull_request_ids:
        pull_request_list = list(PullRequest.objects.filter(id__in=pull_request_ids))
        pull_requests_by_id = {
            c.id: d
            for c, d in zip(pull_request_list, serialize(pull_request_list, user))
        }
        pull_requests = {
            i: pull_requests_by_id.get(i.data['pull_request'])
            for i in item_list if i.type == Activity.SET_RESOLVED_IN_PULL_REQUEST
        }
    else:
        pull_requests = {}

    groups = apply_values(
        functools.partial(serialize, user=user),
        Group.objects.in_bulk(
            set(
                i.data['source_id']
                for i in item_list if i.type == Activity.UNMERGE_DESTINATION
            ) | set(
                i.data['destination_id']
                for i in item_list if i.type == Activity.UNMERGE_SOURCE
            )
        )
    )

    return {
        item: {
            'user': users[six.text_type(item.user_id)] if item.user_id else None,
            'source': groups.get(item.data['source_id'])
            if item.type == Activity.UNMERGE_DESTINATION else None,
            'destination': groups.get(item.data['destination_id'])
            if item.type == Activity.UNMERGE_SOURCE else None,
            'commit': commits.get(item),
            'pull_request': pull_requests.get(item),
        } for item in item_list
    }
def get(self, request, group, environment):
    try:
        environment = Environment.objects.get(
            project_id=group.project_id,
            # XXX(dcramer): we have no great way to pass the empty env
            name='' if environment == 'none' else environment,
        )
    except Environment.DoesNotExist:
        raise ResourceDoesNotExist

    first_release = GroupRelease.objects.filter(
        group_id=group.id,
        environment=environment.name,
    ).order_by('first_seen').first()

    last_release = GroupRelease.objects.filter(
        group_id=group.id,
        environment=environment.name,
    ).order_by('-first_seen').first()

    # the current release is the 'latest seen' release within the
    # environment even if it hasn't affected this issue
    current_release = GroupRelease.objects.filter(
        group_id=group.id,
        environment=environment.name,
        release_id=ReleaseEnvironment.objects.filter(
            project_id=group.project_id,
            environment_id=environment.id,
        ).order_by('-first_seen').values_list('release_id', flat=True).first(),
    ).first()

    last_seen = GroupRelease.objects.filter(
        group_id=group.id,
        environment=environment.name,
    ).order_by('-last_seen').values_list('last_seen', flat=True).first()

    until = request.GET.get('until')
    if until:
        until = to_datetime(float(until))

    context = {
        'environment': serialize(
            environment,
            request.user,
            GroupEnvironmentWithStatsSerializer(
                group=group,
                until=until,
            )
        ),
        'firstRelease': serialize(first_release, request.user),
        'lastRelease': serialize(last_release, request.user),
        'currentRelease': serialize(
            current_release,
            request.user,
            GroupReleaseWithStatsSerializer(
                until=until,
            )
        ),
        'lastSeen': last_seen,
        'firstSeen': first_release.first_seen if first_release else None,
    }
    return Response(context)
def get(self, request, organization):
    """
    List an Organization's Repositories
    ```````````````````````````````````

    Return a list of version control repositories for a given organization.

    :pparam string organization_slug: the organization short name
    :auth: required
    """
    queryset = Repository.objects.filter(
        organization_id=organization.id,
    )

    status = request.GET.get('status', 'active')
    if status == 'active':
        queryset = queryset.filter(
            status=ObjectStatus.VISIBLE,
        )
    elif status == 'deleted':
        queryset = queryset.exclude(
            status=ObjectStatus.VISIBLE,
        )
    # TODO(mn): Remove once old Plugins are removed or everyone migrates to
    # the new Integrations. Hopefully someday?
    elif status == 'unmigratable':
        integrations = Integration.objects.filter(
            organizationintegration__organization=organization,
            organizationintegration__status=ObjectStatus.ACTIVE,
            provider__in=('bitbucket', 'github', 'vsts'),
            status=ObjectStatus.ACTIVE,
        )

        repos = []
        for i in integrations:
            try:
                repos.extend(i.get_installation(organization.id)
                              .get_unmigratable_repositories())
            except Exception:
                capture_exception()
                # Don't rely on the Integration's API being available. If
                # it's not, the page should still render.
                continue

        return Response(serialize(repos, request.user))
    elif status:
        queryset = queryset.none()

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by='name',
        on_results=lambda x: serialize(x, request.user),
        paginator_cls=OffsetPaginator,
    )
def serialize(self, obj, attrs, user):
    from sentry import features
    from sentry.app import env
    from sentry.api.serializers.models.team import TeamWithProjectsSerializer

    team_list = list(Team.objects.filter(
        organization=obj,
        status=TeamStatus.VISIBLE,
    ))
    for team in team_list:
        team._organization_cache = obj

    onboarding_tasks = list(OrganizationOnboardingTask.objects.filter(
        organization=obj,
    ).select_related('user'))

    feature_list = []
    if features.has('organizations:sso', obj, actor=user):
        feature_list.append('sso')
    if features.has('organizations:callsigns', obj, actor=user):
        feature_list.append('callsigns')
    if features.has('organizations:new-tracebacks', obj, actor=user):
        feature_list.append('new-tracebacks')
    if features.has('organizations:onboarding', obj, actor=user) and \
            not OrganizationOption.objects.filter(organization=obj).exists():
        feature_list.append('onboarding')
    if features.has('organizations:api-keys', obj, actor=user) or \
            ApiKey.objects.filter(organization=obj).exists():
        feature_list.append('api-keys')
    if getattr(obj.flags, 'allow_joinleave'):
        feature_list.append('open-membership')
    if not getattr(obj.flags, 'disable_shared_issues'):
        feature_list.append('shared-issues')

    context = super(DetailedOrganizationSerializer, self).serialize(
        obj, attrs, user)
    context['quota'] = {
        'maxRate': quotas.get_organization_quota(obj),
        'projectLimit': int(OrganizationOption.objects.get_value(
            organization=obj,
            key='sentry:project-rate-limit',
            default=100,
        )),
    }
    context['teams'] = serialize(
        team_list, user, TeamWithProjectsSerializer())
    if env.request:
        context['access'] = access.from_request(env.request, obj).scopes
    else:
        context['access'] = access.from_user(user, obj).scopes
    context['features'] = feature_list
    context['pendingAccessRequests'] = OrganizationAccessRequest.objects.filter(
        team__organization=obj,
    ).count()
    context['onboardingTasks'] = serialize(onboarding_tasks, user, OnboardingTasksSerializer())
    return context
def test_visibility(self):
    visible = self.create_environment(name='visible', project=self.project, is_hidden=False)
    hidden = self.create_environment(name='not visible', project=self.project, is_hidden=True)
    not_set = self.create_environment(name='null visible', project=self.project)

    response = self.get_valid_response(self.project.organization.slug, visibility='visible')
    assert response.data == serialize([not_set, visible])

    response = self.get_valid_response(self.project.organization.slug, visibility='hidden')
    assert response.data == serialize([hidden])

    response = self.get_valid_response(self.project.organization.slug, visibility='all')
    assert response.data == serialize([hidden, not_set, visible])
def test_serialize(self):
    assert serialize([]) == []
    assert serialize(None) is None

    user = self.create_user()

    # We don't want to assert on the value, just that it serialized
    assert isinstance(serialize(user), dict)

    # explicitly passed serializer
    foo_serializer = FooSerializer()
    assert serialize(user, serializer=foo_serializer) == 'lol'

    foo = Foo()
    assert serialize(foo) is foo, 'should return the object when unknown'
    assert serialize(foo, serializer=foo_serializer) == 'lol'

    rv = serialize([user])
    assert isinstance(rv, list)
    assert len(rv) == 1

    rv = serialize([user, None])
    assert isinstance(rv, list)
    assert len(rv) == 2
    assert rv[1] is None

    rv = serialize([None, user])
    assert isinstance(rv, list)
    assert len(rv) == 2
    assert rv[0] is None
    assert isinstance(rv[1], dict)
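# A hypothetical, minimal sketch of the registry dispatch that serialize()
# performs (names with a _sketch suffix are illustrative, not Sentry's
# internals): a type-keyed registry maps a model class to a serializer
# instance, and unknown types pass through unchanged, matching the test above.
_registry = {}

def register_sketch(model):
    def wrapped(cls):
        _registry[model] = cls()
        return cls
    return wrapped

def serialize_sketch(obj, user=None, serializer=None):
    if obj is None:
        return None
    if isinstance(obj, list):
        return [serialize_sketch(o, user, serializer) for o in obj]
    serializer = serializer or _registry.get(type(obj))
    if serializer is None:
        return obj  # 'should return the object when unknown'
    return serializer.serialize(obj, {}, user)

class _User(object):
    pass

@register_sketch(_User)
class _UserSerializer(object):
    def serialize(self, obj, attrs, user):
        return {'id': id(obj)}

assert isinstance(serialize_sketch(_User()), dict)
unknown = object()
assert serialize_sketch(unknown) is unknown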
def get(self, request, project, plugin_id):
    plugin = self._get_plugin(plugin_id)

    try:
        context = serialize(plugin, request.user, PluginWithConfigSerializer(project))
    except PluginIdentityRequired as e:
        context = serialize(plugin, request.user, PluginSerializer(project))
        context['config_error'] = e.message
        context['auth_url'] = reverse('socialauth_associate', args=[plugin.slug])

    return Response(context)
def _process_resource_change(action, sender, instance_id, retryer=None, *args, **kwargs):
    # The class is serialized as a string when enqueueing the class.
    model = TYPES[sender]
    # The Event model has different hooks for the different event types.
    # The sender determines the type (e.g. Error) and therefore the
    # 'name' (e.g. 'error').
    if issubclass(model, EventCommon):
        if not kwargs.get("instance"):
            extra = {"sender": sender, "action": action, "event_id": instance_id}
            logger.info("process_resource_change.event_missing_event", extra=extra)
            return
        name = sender.lower()
    else:
        # Some resources are named differently than their model, e.g.
        # Group vs. Issue. Look up the human name for the model, defaulting
        # to the model name.
        name = RESOURCE_RENAMES.get(model.__name__, model.__name__.lower())

    # By default, use Celery's `current` but allow a value to be passed for the
    # bound Task.
    retryer = retryer or current

    # We may run into a race condition where this task executes before the
    # transaction that creates the Group has committed.
    try:
        if issubclass(model, EventCommon):
            # XXX(Meredith): Passing through the entire event was an intentional choice
            # to avoid having to query NodeStore again for data we had previously in
            # post_process. While this is not ideal, changing this will most likely involve
            # an overhaul of how we do things in post_process, not just this task alone.
            instance = kwargs.get("instance")
        else:
            instance = model.objects.get(id=instance_id)
    except model.DoesNotExist as e:
        # Explicitly requeue the task, so we don't report this to Sentry until
        # we hit the max number of retries.
        return retryer.retry(exc=e)

    event = "{}.{}".format(name, action)

    if event not in VALID_EVENTS:
        return

    org = None

    if isinstance(instance, Group) or issubclass(model, EventCommon):
        org = Organization.objects.get_from_cache(
            id=Project.objects.get_from_cache(id=instance.project_id).organization_id
        )

    installations = filter(
        lambda i: event in i.sentry_app.events,
        org.sentry_app_installations.select_related("sentry_app"),
    )

    for installation in installations:
        data = {}
        if issubclass(model, EventCommon):
            data[name] = _webhook_event_data(instance, instance.group_id, instance.project_id)
            send_webhooks(installation, event, data=data)
        else:
            data[name] = serialize(instance)
            send_webhooks(installation, event, data=data)

        metrics.incr("resource_change.processed", sample_rate=1.0, tags={"change_event": event})
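# How the event name is derived above, in miniature. RESOURCE_RENAMES_SKETCH
# is a stand-in for the real RESOURCE_RENAMES table; the 'Group' -> 'issue'
# entry follows the comment about Group vs. Issue in the task above.
RESOURCE_RENAMES_SKETCH = {"Group": "issue"}

def event_name(model_name, action):
    name = RESOURCE_RENAMES_SKETCH.get(model_name, model_name.lower())
    return "{}.{}".format(name, action)

assert event_name("Group", "created") == "issue.created"
assert event_name("Release", "created") == "release.created"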
def test_no_user_unsubscribed(self):
    group = self.create_group()

    result = serialize(group)
    assert not result['isSubscribed']
def get(self, request, organization):
    """
    List an Organization's Releases
    ```````````````````````````````
    Return a list of releases for a given organization.

    :pparam string organization_slug: the organization short name
    :qparam string query: this parameter can be used to create a
                          "starts with" filter for the version.
    """
    query = request.GET.get("query")
    with_health = request.GET.get("health") == "1"
    status_filter = request.GET.get("status", "open")
    flatten = request.GET.get("flatten") == "1"
    sort = request.GET.get("sort") or "date"
    health_stat = request.GET.get("healthStat") or "sessions"

    summary_stats_period = request.GET.get("summaryStatsPeriod") or "14d"
    health_stats_period = request.GET.get("healthStatsPeriod") or ("24h" if with_health else "")
    if summary_stats_period not in STATS_PERIODS:
        raise ParseError(detail=get_stats_period_detail("summaryStatsPeriod", STATS_PERIODS))
    if health_stats_period and health_stats_period not in STATS_PERIODS:
        raise ParseError(detail=get_stats_period_detail("healthStatsPeriod", STATS_PERIODS))

    if health_stat not in ("sessions", "users"):
        raise ParseError(detail="invalid healthStat")

    paginator_cls = OffsetPaginator
    paginator_kwargs = {}

    try:
        filter_params = self.get_filter_params(request, organization, date_filter_optional=True)
    except NoProjects:
        return Response([])

    # This should get us all the projects into postgres that have received
    # health data in the last 24 hours. If health data is not requested
    # we don't upsert releases.
    if with_health:
        debounce_update_release_health_data(organization, filter_params["project_id"])

    queryset = Release.objects.filter(organization=organization)

    if status_filter:
        try:
            status_int = ReleaseStatus.from_string(status_filter)
        except ValueError:
            raise ParseError(detail="invalid value for status")

        if status_int == ReleaseStatus.OPEN:
            queryset = queryset.filter(Q(status=status_int) | Q(status=None))
        else:
            queryset = queryset.filter(status=status_int)

    queryset = queryset.select_related("owner").annotate(
        date=Coalesce("date_released", "date_added"),
    )

    queryset = add_environment_to_queryset(queryset, filter_params)

    if query:
        query_q = Q(version__icontains=query)

        suffix_match = _release_suffix.match(query)
        if suffix_match is not None:
            query_q |= Q(version__icontains="%s+%s" % suffix_match.groups())

        queryset = queryset.filter(query_q)

    select_extra = {}

    queryset = queryset.distinct()
    if flatten:
        select_extra["_for_project_id"] = "sentry_release_project.project_id"

    if sort == "date":
        queryset = queryset.filter(projects__id__in=filter_params["project_id"]).order_by("-date")
        paginator_kwargs["order_by"] = "-date"
    elif sort in (
        "crash_free_sessions",
        "crash_free_users",
        "sessions",
        "users",
        "sessions_24h",
        "users_24h",
    ):
        if not flatten:
            return Response(
                {"detail": "sorting by crash statistics requires flattening (flatten=1)"},
                status=400,
            )
        paginator_cls = MergingOffsetPaginator
        paginator_kwargs.update(
            data_load_func=lambda offset, limit: get_project_releases_by_stability(
                project_ids=filter_params["project_id"],
                environments=filter_params.get("environment"),
                scope=sort,
                offset=offset,
                stats_period=summary_stats_period,
                limit=limit,
            ),
            apply_to_queryset=lambda queryset, rows: queryset.filter(
                projects__id__in=list(x[0] for x in rows),
                version__in=list(x[1] for x in rows),
            ),
            key_from_model=lambda x: (x._for_project_id, x.version),
        )
    else:
        return Response({"detail": "invalid sort"}, status=400)

    queryset = queryset.extra(select=select_extra)
    queryset = add_date_filter_to_queryset(queryset, filter_params)

    return self.paginate(
        request=request,
        queryset=queryset,
        paginator_cls=paginator_cls,
        on_results=lambda x: serialize(
            x,
            request.user,
            with_health_data=with_health,
            health_stat=health_stat,
            health_stats_period=health_stats_period,
            summary_stats_period=summary_stats_period,
            environments=filter_params.get("environment") or None,
        ),
        **paginator_kwargs
    )
def put(self, request, organization, member_id):
    """
    Update an invite request to Organization
    ````````````````````````````````````````

    Update and/or approve an invite request to an organization.

    :pparam string organization_slug: the slug of the organization the member will belong to
    :param string member_id: the member ID
    :param boolean approve: allows the member to be invited
    :param string role: the suggested role of the new member
    :param array teams: the suggested slugs of the teams the member should belong to.

    :auth: required
    """
    try:
        member = self._get_member(organization, member_id)
    except OrganizationMember.DoesNotExist:
        raise ResourceDoesNotExist

    serializer = OrganizationMemberSerializer(
        data=request.data,
        context={"organization": organization, "allowed_roles": roles.get_all()},
        partial=True,
    )

    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    result = serializer.validated_data

    if result.get("role"):
        member.update(role=result["role"])

    if "teams" in result:
        save_team_assignments(member, result["teams"])

    if "approve" in request.data:
        _, allowed_roles = get_allowed_roles(request, organization)

        serializer = ApproveInviteRequestSerializer(
            data=request.data,
            context={
                "request": request,
                "organization": organization,
                "member": member,
                "allowed_roles": allowed_roles,
            },
        )

        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        result = serializer.validated_data

        if result.get("approve") and not member.invite_approved:
            member.approve_invite()
            member.save()

            if settings.SENTRY_ENABLE_INVITES:
                member.send_invite_email()
                member_invited.send_robust(
                    member=member,
                    user=request.user,
                    sender=self,
                    referrer=request.data.get("referrer"),
                )

            self.create_audit_entry(
                request=request,
                organization_id=organization.id,
                target_object=member.id,
                data=member.get_audit_log_data(),
                event=AuditLogEntryEvent.MEMBER_INVITE
                if settings.SENTRY_ENABLE_INVITES
                else AuditLogEntryEvent.MEMBER_ADD,
            )

    return Response(
        serialize(member, serializer=OrganizationMemberWithTeamsSerializer()),
        status=status.HTTP_200_OK,
    )
def get(self, request, event_id):
    """
    Retrieve an Event
    `````````````````

    This endpoint returns the data for a specific event. The event ID is
    the event as it appears in the Sentry database and not the event ID
    that is reported by the client upon submission.
    """
    event = Event.objects.get(id=event_id)

    self.check_object_permissions(request, event.group)

    Event.objects.bind_nodes([event], 'data')

    # HACK(dcramer): work around lack of unique sorting on datetime
    base_qs = Event.objects.filter(
        group=event.group_id,
    ).exclude(id=event.id)

    # First, we collect 5 leading/trailing events
    next_events = sorted(
        base_qs.filter(
            datetime__gte=event.datetime,
        ).order_by('datetime')[0:5],
        key=EVENT_ORDERING_KEY,
    )
    prev_events = sorted(
        base_qs.filter(
            datetime__lte=event.datetime,
        ).order_by('-datetime')[0:5],
        key=EVENT_ORDERING_KEY,
        reverse=True,
    )

    # Now, try and find the real next event.
    # "next" means:
    # * If identical timestamps, greater of the ids
    # * else greater of the timestamps
    next_event = None
    for e in next_events:
        if e.datetime == event.datetime and e.id > event.id:
            next_event = e
            break
        if e.datetime > event.datetime:
            next_event = e
            break

    # Last, pick the previous event
    # "previous" means:
    # * If identical timestamps, lesser of the ids
    # * else lesser of the timestamps
    prev_event = None
    for e in prev_events:
        if e.datetime == event.datetime and e.id < event.id:
            prev_event = e
            break
        if e.datetime < event.datetime:
            prev_event = e
            break

    try:
        user_report = UserReport.objects.get(
            event_id=event.event_id,
            project=event.project,
        )
    except UserReport.DoesNotExist:
        user_report = None

    data = serialize(event, request.user)
    data['userReport'] = serialize(user_report, request.user)
    data['release'] = self._get_release_info(request, event)

    if next_event:
        data['nextEventID'] = str(next_event.id)
    else:
        data['nextEventID'] = None
    if prev_event:
        data['previousEventID'] = str(prev_event.id)
    else:
        data['previousEventID'] = None

    return Response(data)
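# The "next event" tie-break above, reduced to a self-contained sketch:
# events compare by (datetime, id), and equal timestamps fall back to the
# id. Plain tuples stand in for Event rows (illustrative only).
def pick_next(candidates, current):
    cur_dt, cur_id = current
    for dt, eid in sorted(candidates):
        if dt == cur_dt and eid > cur_id:
            return (dt, eid)
        if dt > cur_dt:
            return (dt, eid)
    return None

# Equal timestamps resolve by id before falling through to later times:
assert pick_next([(1, 5), (1, 7), (2, 1)], (1, 5)) == (1, 7)
assert pick_next([(1, 5), (2, 1)], (1, 5)) == (2, 1)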
def get(self, request: Request, organization, member) -> Response:
    context = serialize(
        member,
        serializer=_scim_member_serializer_with_expansion(organization),
    )
    return Response(context)
def get(self, request, group):
    """
    Retrieve an Issue
    `````````````````

    Return details on an individual issue. This returns the basic stats for
    the issue (title, last seen, first seen), some overall numbers (number
    of comments, user reports) as well as the summarized event data.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # TODO(dcramer): handle unauthenticated/public response
    data = serialize(
        group, request.user,
        GroupSerializer(environment_func=self._get_environment_func(
            request, group.project.organization_id))
    )

    # TODO: these probably should be another endpoint
    activity = self._get_activity(request, group, num=100)
    seen_by = self._get_seen_by(request, group)

    first_release = group.get_first_release()

    if first_release is not None:
        last_release = group.get_last_release()
    else:
        last_release = None

    action_list = self._get_actions(request, group)

    if first_release:
        first_release = self._get_release_info(request, group, first_release)
    if last_release:
        last_release = self._get_release_info(request, group, last_release)

    try:
        environment_id = self._get_environment_id_from_request(
            request, group.project.organization_id)
    except Environment.DoesNotExist:
        get_range = lambda model, keys, start, end, **kwargs: \
            {k: tsdb.make_series(0, start, end) for k in keys}
        tags = []
        user_reports = UserReport.objects.none()
    else:
        get_range = functools.partial(tsdb.get_range, environment_id=environment_id)
        tags = tagstore.get_group_tag_keys(
            group.project_id, group.id, environment_id, limit=100)
        if environment_id is None:
            user_reports = UserReport.objects.filter(group=group)
        else:
            user_reports = UserReport.objects.filter(
                group=group, environment_id=environment_id)

    now = timezone.now()
    hourly_stats = tsdb.rollup(
        get_range(
            model=tsdb.models.group,
            keys=[group.id],
            end=now,
            start=now - timedelta(days=1),
        ), 3600
    )[group.id]
    daily_stats = tsdb.rollup(
        get_range(
            model=tsdb.models.group,
            keys=[group.id],
            end=now,
            start=now - timedelta(days=30),
        ), 3600 * 24
    )[group.id]

    participants = list(
        User.objects.filter(
            groupsubscription__is_active=True,
            groupsubscription__group=group,
        )
    )

    data.update({
        'firstRelease': first_release,
        'lastRelease': last_release,
        'activity': serialize(activity, request.user),
        'seenBy': seen_by,
        'participants': serialize(participants, request.user),
        'pluginActions': action_list,
        'pluginIssues': self._get_available_issue_plugins(request, group),
        'pluginContexts': self._get_context_plugins(request, group),
        'userReportCount': user_reports.count(),
        'tags': sorted(serialize(tags, request.user), key=lambda x: x['name']),
        'stats': {
            '24h': hourly_stats,
            '30d': daily_stats,
        },
    })

    # the current release is the 'latest seen' release within the
    # environment even if it hasn't affected this issue
    try:
        environment = self._get_environment_from_request(
            request,
            group.project.organization_id,
        )
    except Environment.DoesNotExist:
        environment = None

    if environment is not None:
        try:
            current_release = GroupRelease.objects.filter(
                group_id=group.id,
                environment=environment.name,
                release_id=ReleaseEnvironment.objects.filter(
                    release_id__in=ReleaseProject.objects.filter(
                        project_id=group.project_id).values_list('release_id', flat=True),
                    organization_id=group.project.organization_id,
                    environment_id=environment.id,
                ).order_by('-first_seen').values_list('release_id', flat=True)[:1],
            )[0]
        except IndexError:
            current_release = None

        data.update({
            'currentRelease': serialize(
                current_release, request.user, GroupReleaseWithStatsSerializer())
        })

    return Response(data)
def put(self, request, project):
    """
    Update a Project
    ````````````````

    Update various attributes and configurable settings for the given project.

    Only supplied values are updated.

    :pparam string organization_slug: the slug of the organization the
                                      project belongs to.
    :pparam string project_slug: the slug of the project to update.
    :param string name: the new name for the project.
    :param string slug: the new slug for the project.
    :param string team: the slug of new team for the project. Note, will be
                        deprecated soon when multiple teams can have access
                        to a project.
    :param string platform: the new platform for the project.
    :param boolean isBookmarked: in case this API call is invoked with a
                                 user context this allows changing of
                                 the bookmark flag.
    :param int digestsMinDelay:
    :param int digestsMaxDelay:
    :auth: required
    """
    has_project_write = (request.auth and request.auth.has_scope("project:write")) or (
        request.access and request.access.has_scope("project:write")
    )

    changed_proj_settings = {}

    if has_project_write:
        serializer_cls = ProjectAdminSerializer
    else:
        serializer_cls = ProjectMemberSerializer

    serializer = serializer_cls(
        data=request.data, partial=True, context={"project": project, "request": request}
    )
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)

    result = serializer.validated_data

    if not has_project_write:
        # options isn't part of the serializer, but should not be editable by members
        for key in chain(six.iterkeys(ProjectAdminSerializer().fields), ["options"]):
            if request.data.get(key) and not result.get(key):
                return Response(
                    {"detail": ["You do not have permission to perform this action."]},
                    status=403,
                )

    changed = False

    old_slug = None
    if result.get("slug"):
        old_slug = project.slug
        project.slug = result["slug"]
        changed = True
        changed_proj_settings["new_slug"] = project.slug

    if result.get("name"):
        project.name = result["name"]
        changed = True
        changed_proj_settings["new_project"] = project.name

    old_team_id = None
    new_team = None
    if result.get("team"):
        return Response(
            {"detail": ["Editing a team via this endpoint has been deprecated."]},
            status=400,
        )

    if result.get("platform"):
        project.platform = result["platform"]
        changed = True

    if changed:
        project.save()
        if old_team_id is not None:
            ProjectTeam.objects.filter(project=project, team_id=old_team_id).update(
                team=new_team
            )
        if old_slug:
            ProjectRedirect.record(project, old_slug)

    if result.get("isBookmarked"):
        try:
            with transaction.atomic():
                ProjectBookmark.objects.create(project_id=project.id, user=request.user)
        except IntegrityError:
            pass
    elif result.get("isBookmarked") is False:
        ProjectBookmark.objects.filter(project_id=project.id, user=request.user).delete()

    if result.get("digestsMinDelay"):
        project.update_option("digests:mail:minimum_delay", result["digestsMinDelay"])
    if result.get("digestsMaxDelay"):
        project.update_option("digests:mail:maximum_delay", result["digestsMaxDelay"])
    if result.get("subjectPrefix") is not None:
        if project.update_option("mail:subject_prefix", result["subjectPrefix"]):
            changed_proj_settings["mail:subject_prefix"] = result["subjectPrefix"]
    if result.get("subjectTemplate"):
        project.update_option("mail:subject_template", result["subjectTemplate"])
    if result.get("scrubIPAddresses") is not None:
        if project.update_option("sentry:scrub_ip_address", result["scrubIPAddresses"]):
            changed_proj_settings["sentry:scrub_ip_address"] = result["scrubIPAddresses"]
    if result.get("groupingConfig") is not None:
        if project.update_option("sentry:grouping_config", result["groupingConfig"]):
            changed_proj_settings["sentry:grouping_config"] = result["groupingConfig"]
    if result.get("groupingEnhancements") is not None:
        if project.update_option(
            "sentry:grouping_enhancements", result["groupingEnhancements"]
        ):
            changed_proj_settings["sentry:grouping_enhancements"] = result[
                "groupingEnhancements"
            ]
    if result.get("groupingEnhancementsBase") is not None:
        if project.update_option(
            "sentry:grouping_enhancements_base", result["groupingEnhancementsBase"]
        ):
            changed_proj_settings["sentry:grouping_enhancements_base"] = result[
                "groupingEnhancementsBase"
            ]
    if result.get("fingerprintingRules") is not None:
        if project.update_option("sentry:fingerprinting_rules", result["fingerprintingRules"]):
            changed_proj_settings["sentry:fingerprinting_rules"] = result["fingerprintingRules"]
    if result.get("securityToken") is not None:
        if project.update_option("sentry:token", result["securityToken"]):
            changed_proj_settings["sentry:token"] = result["securityToken"]
    if result.get("securityTokenHeader") is not None:
        if project.update_option("sentry:token_header", result["securityTokenHeader"]):
            changed_proj_settings["sentry:token_header"] = result["securityTokenHeader"]
    if result.get("verifySSL") is not None:
        if project.update_option("sentry:verify_ssl", result["verifySSL"]):
            changed_proj_settings["sentry:verify_ssl"] = result["verifySSL"]
    if result.get("dataScrubber") is not None:
        if project.update_option("sentry:scrub_data", result["dataScrubber"]):
            changed_proj_settings["sentry:scrub_data"] = result["dataScrubber"]
    if result.get("dataScrubberDefaults") is not None:
        if project.update_option("sentry:scrub_defaults", result["dataScrubberDefaults"]):
            changed_proj_settings["sentry:scrub_defaults"] = result["dataScrubberDefaults"]
    if result.get("sensitiveFields") is not None:
        if project.update_option("sentry:sensitive_fields", result["sensitiveFields"]):
            changed_proj_settings["sentry:sensitive_fields"] = result["sensitiveFields"]
    if result.get("safeFields") is not None:
        if project.update_option("sentry:safe_fields", result["safeFields"]):
            changed_proj_settings["sentry:safe_fields"] = result["safeFields"]
    if result.get("storeCrashReports") is not None:
        if project.update_option("sentry:store_crash_reports", result["storeCrashReports"]):
            changed_proj_settings["sentry:store_crash_reports"] = result["storeCrashReports"]
    if result.get("relayPiiConfig") is not None:
        if project.update_option("sentry:relay_pii_config", result["relayPiiConfig"]):
            changed_proj_settings["sentry:relay_pii_config"] = (
                result["relayPiiConfig"].strip() or None
            )
    if result.get("builtinSymbolSources") is not None:
        if project.update_option(
            "sentry:builtin_symbol_sources", result["builtinSymbolSources"]
        ):
            changed_proj_settings["sentry:builtin_symbol_sources"] = result[
                "builtinSymbolSources"
            ]
    if result.get("symbolSources") is not None:
        if project.update_option("sentry:symbol_sources", result["symbolSources"]):
            changed_proj_settings["sentry:symbol_sources"] = result["symbolSources"] or None
    if "defaultEnvironment" in result:
        if result["defaultEnvironment"] is None:
            project.delete_option("sentry:default_environment")
        else:
            project.update_option("sentry:default_environment", result["defaultEnvironment"])
    # resolveAge can be None
    if "resolveAge" in result:
        if project.update_option(
            "sentry:resolve_age",
            0 if result.get("resolveAge") is None else int(result["resolveAge"]),
        ):
            changed_proj_settings["sentry:resolve_age"] = result["resolveAge"]
    if result.get("scrapeJavaScript") is not None:
        if project.update_option("sentry:scrape_javascript", result["scrapeJavaScript"]):
            changed_proj_settings["sentry:scrape_javascript"] = result["scrapeJavaScript"]
    if result.get("allowedDomains"):
        if project.update_option("sentry:origins", result["allowedDomains"]):
            changed_proj_settings["sentry:origins"] = result["allowedDomains"]

    if result.get("isSubscribed"):
        UserOption.objects.set_value(
            user=request.user, key="mail:alert", value=1, project=project
        )
    elif result.get("isSubscribed") is False:
        UserOption.objects.set_value(
            user=request.user, key="mail:alert", value=0, project=project
        )

    # TODO(dcramer): rewrite options to use standard API config
    if has_project_write:
        options = request.data.get("options", {})
        if "sentry:origins" in options:
            project.update_option(
                "sentry:origins", clean_newline_inputs(options["sentry:origins"])
            )
        if "sentry:resolve_age" in options:
            project.update_option("sentry:resolve_age", int(options["sentry:resolve_age"]))
        if "sentry:scrub_data" in options:
            project.update_option("sentry:scrub_data", bool(options["sentry:scrub_data"]))
        if "sentry:scrub_defaults" in options:
            project.update_option(
                "sentry:scrub_defaults", bool(options["sentry:scrub_defaults"])
            )
        if "sentry:safe_fields" in options:
            project.update_option(
                "sentry:safe_fields",
                [s.strip().lower() for s in options["sentry:safe_fields"]],
            )
        if "sentry:store_crash_reports" in options:
            project.update_option(
                "sentry:store_crash_reports",
                convert_crashreport_count(options["sentry:store_crash_reports"]),
            )
        if "sentry:relay_pii_config" in options:
            project.update_option(
                "sentry:relay_pii_config", options["sentry:relay_pii_config"].strip() or None
            )
        if "sentry:sensitive_fields" in options:
            project.update_option(
                "sentry:sensitive_fields",
                [s.strip().lower() for s in options["sentry:sensitive_fields"]],
            )
        if "sentry:scrub_ip_address" in options:
            project.update_option(
                "sentry:scrub_ip_address", bool(options["sentry:scrub_ip_address"])
            )
        if "sentry:grouping_config" in options:
            project.update_option("sentry:grouping_config", options["sentry:grouping_config"])
        if "sentry:fingerprinting_rules" in options:
            project.update_option(
                "sentry:fingerprinting_rules", options["sentry:fingerprinting_rules"]
            )
        if "mail:subject_prefix" in options:
            project.update_option("mail:subject_prefix", options["mail:subject_prefix"])
        if "sentry:default_environment" in options:
            project.update_option(
                "sentry:default_environment", options["sentry:default_environment"]
            )
        if "sentry:csp_ignored_sources_defaults" in options:
            project.update_option(
                "sentry:csp_ignored_sources_defaults",
                bool(options["sentry:csp_ignored_sources_defaults"]),
            )
        if "sentry:csp_ignored_sources" in options:
            project.update_option(
                "sentry:csp_ignored_sources",
                clean_newline_inputs(options["sentry:csp_ignored_sources"]),
            )
        if "sentry:blacklisted_ips" in options:
            project.update_option(
                "sentry:blacklisted_ips",
                clean_newline_inputs(options["sentry:blacklisted_ips"]),
            )
        if "feedback:branding" in options:
            project.update_option(
                "feedback:branding", "1" if options["feedback:branding"] else "0"
            )
        if "sentry:reprocessing_active" in options:
            project.update_option(
                "sentry:reprocessing_active", bool(options["sentry:reprocessing_active"])
            )
        if "filters:blacklisted_ips" in options:
            project.update_option(
                "sentry:blacklisted_ips",
                clean_newline_inputs(options["filters:blacklisted_ips"]),
            )
        if u"filters:{}".format(FilterTypes.RELEASES) in options:
            if features.has("projects:custom-inbound-filters", project, actor=request.user):
                project.update_option(
                    u"sentry:{}".format(FilterTypes.RELEASES),
                    clean_newline_inputs(options[u"filters:{}".format(FilterTypes.RELEASES)]),
                )
            else:
                return Response(
                    {"detail": ["You do not have that feature enabled"]}, status=400
                )
        if u"filters:{}".format(FilterTypes.ERROR_MESSAGES) in options:
            if features.has("projects:custom-inbound-filters", project, actor=request.user):
                project.update_option(
                    u"sentry:{}".format(FilterTypes.ERROR_MESSAGES),
                    clean_newline_inputs(
                        options[u"filters:{}".format(FilterTypes.ERROR_MESSAGES)],
                        case_insensitive=False,
                    ),
                )
            else:
                return Response(
                    {"detail": ["You do not have that feature enabled"]}, status=400
                )
        if "copy_from_project" in result:
            if not project.copy_settings_from(result["copy_from_project"]):
                return Response({"detail": ["Copy project settings failed."]}, status=409)

    self.create_audit_entry(
        request=request,
        organization=project.organization,
        target_object=project.id,
        event=AuditLogEntryEvent.PROJECT_EDIT,
        data=changed_proj_settings,
    )

    data = serialize(project, request.user, DetailedProjectSerializer())
    return Response(data)
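# The repeated "record only real changes" idiom in put() above, isolated as
# a sketch with a plain dict standing in for project options (apply_option
# is a hypothetical helper, not part of the endpoint):
def apply_option(store, changed, key, value):
    """Set store[key] = value and log it in `changed` only if it differs."""
    if store.get(key) == value:
        return False
    store[key] = value
    changed[key] = value
    return True

store, changed = {}, {}
apply_option(store, changed, 'sentry:verify_ssl', True)
apply_option(store, changed, 'sentry:verify_ssl', True)  # no-op the second time
assert changed == {'sentry:verify_ssl': True}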
def put(self, request, group):
    """
    Update an Issue
    ```````````````

    Updates an individual issue's attributes. Only the attributes submitted
    are modified.

    :pparam string issue_id: the ID of the group to retrieve.
    :param string status: the new status for the issue.  Valid values are
                          ``"resolved"``, ``"resolvedInNextRelease"``,
                          ``"unresolved"``, and ``"ignored"``.
    :param string assignedTo: the actor id (or username) of the user or team
                              that should be assigned to this issue.
    :param boolean hasSeen: in case this API call is invoked with a user
                            context this allows changing of the flag
                            that indicates if the user has seen the
                            event.
    :param boolean isBookmarked: in case this API call is invoked with a
                                 user context this allows changing of
                                 the bookmark flag.
    :param boolean isSubscribed:
    :param boolean isPublic: sets the issue to public or private.
    :auth: required
    """
    discard = request.DATA.get('discard')

    # TODO(dcramer): we need to implement assignedTo in the bulk mutation
    # endpoint
    try:
        response = client.put(
            path=u'/projects/{}/{}/issues/'.format(
                group.project.organization.slug,
                group.project.slug,
            ),
            params={
                'id': group.id,
            },
            data=request.DATA,
            request=request,
        )
    except client.ApiError as e:
        return Response(e.body, status=e.status_code)

    # if action was discard, there isn't a group to serialize anymore
    if discard:
        return response

    # we need to fetch the object again as the bulk mutation endpoint only
    # returns a delta, while object mutation returns a complete updated
    # entity.
    # TODO(dcramer): we should update the API and have this be an explicit
    # flag (or remove it entirely) so that deltas are the primary response
    # for mutation.
    group = Group.objects.get(id=group.id)
    serialized = serialize(
        group, request.user,
        GroupSerializer(environment_func=self._get_environment_func(
            request, group.project.organization_id))
    )
    return Response(serialized, status=response.status_code)
def post(self, request, organization, version):
    """
    Create a Deploy
    ```````````````

    Create a deploy for a given release.

    :pparam string organization_slug: the organization short name
    :pparam string version: the version identifier of the release.
    :param string environment: the environment you're deploying to
    :param string name: the optional name of the deploy
    :param url url: the optional url that points to the deploy
    :param datetime dateStarted: an optional date that indicates when
                                 the deploy started
    :param datetime dateFinished: an optional date that indicates when
                                  the deploy ended. If not provided, the
                                  current time is used.
    """
    try:
        release = Release.objects.get(
            version=version,
            organization=organization,
        )
    except Release.DoesNotExist:
        raise ResourceDoesNotExist

    if not self.has_release_permission(request, organization, release):
        raise PermissionDenied

    serializer = DeploySerializer(data=request.DATA)

    if serializer.is_valid():
        result = serializer.object

        try:
            env = Environment.objects.get(
                organization_id=organization.id,
                name=result['environment'],
            )
        except Environment.DoesNotExist:
            # TODO(jess): clean up when changing unique constraint
            lock_key = Environment.get_lock_key(organization.id, result['environment'])
            lock = locks.get(lock_key, duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                try:
                    env = Environment.objects.get(
                        organization_id=organization.id,
                        name=result['environment'],
                    )
                except Environment.DoesNotExist:
                    env = Environment.objects.create(
                        organization_id=organization.id,
                        name=result['environment'],
                    )

        try:
            with transaction.atomic():
                deploy, created = Deploy.objects.create(
                    organization_id=organization.id,
                    release=release,
                    environment_id=env.id,
                    date_finished=result.get('dateFinished', timezone.now()),
                    date_started=result.get('dateStarted'),
                    name=result.get('name'),
                    url=result.get('url'),
                ), True
        except IntegrityError:
            deploy, created = Deploy.objects.get(
                organization_id=organization.id,
                release=release,
                environment_id=env.id,
            ), False
            deploy.update(
                date_finished=result.get('dateFinished', timezone.now()),
                date_started=result.get('dateStarted'),
            )

        activity = None
        for project in release.projects.all():
            activity = Activity.objects.create(
                type=Activity.DEPLOY,
                project=project,
                ident=release.version,
                data={
                    'version': release.version,
                    'deploy_id': deploy.id,
                    'environment': env.name,
                },
                datetime=deploy.date_finished,
            )
        # Somewhat hacky, only send notification for one
        # Deploy Activity record because it will cover all projects
        if activity is not None:
            activity.send_notification()

        # This is the closest status code that makes sense, and we want
        # a unique 2xx response code so people can understand when
        # behavior differs.
        #   208 Already Reported (WebDAV; RFC 5842)
        status = 201 if created else 208

        return Response(serialize(deploy, request.user), status=status)

    return Response(serializer.errors, status=400)
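# Generic sketch of the create-then-fallback upsert used above: try the
# insert first, and on a uniqueness conflict fetch the existing row and
# update it instead. A dict keyed by the unique tuple stands in for the
# table (illustrative only; the real code relies on a DB constraint).
deploys = {}

def upsert_deploy(org_id, release, env_id, **fields):
    key = (org_id, release, env_id)
    if key not in deploys:              # Deploy.objects.create(...) path
        deploys[key] = dict(fields)
        return deploys[key], True       # created -> 201
    deploys[key].update(fields)         # IntegrityError path: get + update
    return deploys[key], False          # already reported -> 208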
def get(self, request, organization):
    """
    List one or more plugin configurations, including a `projectList` for each
    plugin which contains all the projects that have that specific plugin both
    configured and enabled.

    - similar to the `OrganizationPluginsEndpoint`, and can eventually replace it

    :qparam plugins array[string]: an optional list of plugin ids (slugs) if
                                   you want specific plugins. If not set, will
                                   return configurations for all plugins.
    """
    desired_plugins = []
    for slug in request.GET.getlist("plugins") or ():
        # if the user requests a plugin that doesn't exist, throw 404
        try:
            desired_plugins.append(plugins.get(slug))
        except KeyError:
            return Response({"detail": "Plugin %s not found" % slug}, status=404)

    # if no plugins were specified, grab all plugins but limit by those that
    # have the ability to be configured
    if not desired_plugins:
        desired_plugins = list(plugins.plugin_that_can_be_configured())

    # `keys_to_check` are the ProjectOption keys that tell us if a plugin is
    # enabled (e.g. `plugin:enabled`) or configured properly, meaning they
    # have the required information - plugin.required_field - needed for the
    # plugin to work (e.g. `opsgenie:api_key`)
    keys_to_check = []
    for plugin in desired_plugins:
        keys_to_check.append("%s:enabled" % plugin.slug)
        if plugin.required_field:
            keys_to_check.append("%s:%s" % (plugin.slug, plugin.required_field))

    # Get all the project options for org that have truthy values
    project_options = ProjectOption.objects.filter(
        key__in=keys_to_check, project__organization=organization
    ).exclude(value__in=[False, ""])

    """
    This map stores info about whether a plugin is configured and/or enabled
    {
        "plugin_slug": {
            "project_id": {"enabled": True, "configured": False},
        },
    }
    """
    info_by_plugin_project = {}
    for project_option in project_options:
        [slug, field] = project_option.key.split(":")
        project_id = project_option.project_id

        # first add to the set of all projects by plugin
        info_by_plugin_project.setdefault(slug, {}).setdefault(
            project_id, {"enabled": False, "configured": False}
        )

        # next check if enabled
        if field == "enabled":
            info_by_plugin_project[slug][project_id]["enabled"] = True
        # otherwise the ProjectOption is a configuration field
        else:
            info_by_plugin_project[slug][project_id]["configured"] = True

    # get the IDs of all projects for found project options and grab them from the DB
    project_id_set = {project_option.project_id for project_option in project_options}
    projects = Project.objects.filter(id__in=project_id_set, status=ObjectStatus.VISIBLE)

    # create a key/value map of our projects
    project_map = {project.id: project for project in projects}

    # iterate through the desired plugins and serialize them
    serialized_plugins = []
    for plugin in desired_plugins:
        serialized_plugin = serialize(plugin, request.user, PluginSerializer())

        serialized_plugin["projectList"] = []

        info_by_project = info_by_plugin_project.get(plugin.slug, {})

        # iterate through the projects
        for project_id, plugin_info in six.iteritems(info_by_project):
            # if the project is being deleted
            if project_id not in project_map:
                continue
            project = project_map[project_id]

            # only include plugins which are configured
            if not plugin_info["configured"]:
                continue

            serialized_plugin["projectList"].append(
                {
                    "projectId": project.id,
                    "projectSlug": project.slug,
                    "projectName": project.name,  # TODO(steve): do we need?
                    "enabled": plugin_info["enabled"],
                    "configured": plugin_info["configured"],  # TODO(steve): do we need?
                    "projectPlatform": project.platform,
                }
            )
        # sort by the projectSlug
        serialized_plugin["projectList"].sort(key=lambda x: x["projectSlug"])
        serialized_plugins.append(serialized_plugin)

    return Response(serialized_plugins)
def serialize(self, obj, attrs, user, access): from sentry import experiments onboarding_tasks = list( OrganizationOnboardingTask.objects.filter(organization=obj).select_related("user") ) experiment_assignments = experiments.all(org=obj, actor=user) context = super(DetailedOrganizationSerializer, self).serialize(obj, attrs, user) max_rate = quotas.get_maximum_quota(obj) context["experiments"] = experiment_assignments context["quota"] = { "maxRate": max_rate[0], "maxRateInterval": max_rate[1], "accountLimit": int( OrganizationOption.objects.get_value( organization=obj, key="sentry:account-rate-limit", default=ACCOUNT_RATE_LIMIT_DEFAULT, ) ), "projectLimit": int( OrganizationOption.objects.get_value( organization=obj, key="sentry:project-rate-limit", default=PROJECT_RATE_LIMIT_DEFAULT, ) ), } context.update( { "isDefault": obj.is_default, "defaultRole": obj.default_role, "availableRoles": [{"id": r.id, "name": r.name} for r in roles.get_all()], "openMembership": bool(obj.flags.allow_joinleave), "require2FA": bool(obj.flags.require_2fa), "allowSharedIssues": not obj.flags.disable_shared_issues, "enhancedPrivacy": bool(obj.flags.enhanced_privacy), "dataScrubber": bool( obj.get_option("sentry:require_scrub_data", REQUIRE_SCRUB_DATA_DEFAULT) ), "dataScrubberDefaults": bool( obj.get_option("sentry:require_scrub_defaults", REQUIRE_SCRUB_DEFAULTS_DEFAULT) ), "sensitiveFields": obj.get_option( "sentry:sensitive_fields", SENSITIVE_FIELDS_DEFAULT ) or [], "safeFields": obj.get_option("sentry:safe_fields", SAFE_FIELDS_DEFAULT) or [], "storeCrashReports": convert_crashreport_count( obj.get_option("sentry:store_crash_reports") ), "attachmentsRole": six.text_type( obj.get_option("sentry:attachments_role", ATTACHMENTS_ROLE_DEFAULT) ), "eventsMemberAdmin": bool( obj.get_option("sentry:events_member_admin", EVENTS_MEMBER_ADMIN_DEFAULT) ), "scrubIPAddresses": bool( obj.get_option( "sentry:require_scrub_ip_address", REQUIRE_SCRUB_IP_ADDRESS_DEFAULT ) ), "scrapeJavaScript": bool( obj.get_option("sentry:scrape_javascript", SCRAPE_JAVASCRIPT_DEFAULT) ), "allowJoinRequests": bool( obj.get_option("sentry:join_requests", JOIN_REQUESTS_DEFAULT) ), "relayPiiConfig": six.text_type(obj.get_option("sentry:relay_pii_config") or u"") or None, "apdexThreshold": int( obj.get_option("sentry:apdex_threshold", APDEX_THRESHOLD_DEFAULT) ), } ) trusted_relays_raw = obj.get_option("sentry:trusted-relays") or [] # serialize trusted relays info into their external form context["trustedRelays"] = [TrustedRelaySerializer(raw).data for raw in trusted_relays_raw] context["access"] = access.scopes if access.role is not None: context["role"] = access.role context["pendingAccessRequests"] = OrganizationAccessRequest.objects.filter( team__organization=obj ).count() context["onboardingTasks"] = serialize(onboarding_tasks, user, OnboardingTasksSerializer()) return context
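# Hedged sketch of the quota slice of the context built above; the numbers
# are hypothetical, and quotas.get_maximum_quota is assumed to return a
# (rate, interval) pair as unpacked in the serializer.
example_quota_context = {
    "quota": {
        "maxRate": 0,           # first element of get_maximum_quota(obj)
        "maxRateInterval": 60,  # second element, in seconds
        "accountLimit": 0,      # sentry:account-rate-limit option
        "projectLimit": 100,    # sentry:project-rate-limit option
    },
}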
def get(self, request, project):
    """
    List a Project's Issues
    ```````````````````````

    Return a list of issues (groups) bound to a project. All parameters are
    supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results with
    other statuses send a new query value (i.e. ``?query=`` for all results).

    The ``statsPeriod`` parameter can be used to select the timeline stats
    which should be present. Possible values are: '' (disable), '24h', '14d'

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well. This can
                                cause the return value of the function to
                                return an event issue of a different project
                                which is why this is an opt-in. Set to `1`
                                to enable.
    :qparam querystring query: an optional Sentry structured search query. If
                               not provided an implied ``"is:unresolved"`` is
                               assumed.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :auth: required
    """
    stats_period = request.GET.get('statsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    query = request.GET.get('query', '').strip()
    if query:
        matching_group = None
        matching_event = None
        if len(query) == 32:
            # check to see if we've got an event ID
            try:
                matching_group = Group.objects.from_event_id(project, query)
            except Group.DoesNotExist:
                pass
            else:
                try:
                    matching_event = Event.objects.get(
                        event_id=query, project_id=project.id)
                except Event.DoesNotExist:
                    pass
        # If the query looks like a short id, we want to provide some
        # information about where that is. Note that this can return
        # results for another project. The UI deals with this.
        elif request.GET.get('shortIdLookup') == '1' and \
                looks_like_short_id(query):
            try:
                matching_group = Group.objects.by_qualified_short_id(
                    project.organization_id, query)
            except Group.DoesNotExist:
                matching_group = None

        if matching_group is not None:
            response = Response(
                serialize(
                    [matching_group], request.user,
                    StreamGroupSerializer(
                        stats_period=stats_period,
                        matching_event_id=getattr(matching_event, 'id', None))))
            response['X-Sentry-Direct-Hit'] = '1'
            return response

    try:
        query_kwargs = self._build_query_params_from_request(request, project)
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    try:
        environment_id = self._get_environment_id_from_request(
            request, project.organization_id)
    except Environment.DoesNotExist:
        cursor_result = []
    else:
        cursor_result = search.query(
            count_hits=True, environment_id=environment_id, **query_kwargs)

    results = list(cursor_result)

    context = serialize(
        results, request.user, StreamGroupSerializer(stats_period=stats_period))

    # HACK: remove auto resolved entries
    if query_kwargs.get('status') == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)
    self.add_cursor_headers(request, response, cursor_result)

    if results and query not in SAVED_SEARCH_QUERIES:
        advanced_search.send(project=project, sender=request.user)

    return response
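# Hedged usage sketch for the issue-list endpoint above; the URL, slugs, and
# token are placeholders, the query parameters are the documented :qparam:s.
import requests

resp = requests.get(
    "https://sentry.example.com/api/0/projects/acme/backend/issues/",
    headers={"Authorization": "Bearer <auth-token>"},
    params={
        "statsPeriod": "24h",      # one of '', '24h', '14d'
        "query": "is:unresolved",  # the default when omitted
        "shortIdLookup": "1",      # opt in to BACKEND-1J style lookups
    },
)
# A direct event-id or short-id hit is flagged via a response header:
if resp.headers.get("X-Sentry-Direct-Hit") == "1":
    print("query matched one issue directly")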
def _get_seen_by(self, request, group):
    seen_by = list(
        GroupSeen.objects.filter(group=group)
        .select_related('user')
        .order_by('-last_seen'))
    return serialize(seen_by, request.user)
def post(self, request, organization):
    """
    Create a new asynchronous file export task, and email the user upon
    completion.
    """
    # The data export feature is only available alongside `discover-query`.
    # So to export issue tags, the organization must also have `discover-query`.
    if not features.has("organizations:discover-query", organization):
        return Response(status=404)

    # Get environment_id and limit if available
    try:
        environment_id = self._get_environment_id_from_request(
            request, organization.id)
    except Environment.DoesNotExist as error:
        return Response(error, status=400)
    limit = request.data.get("limit")

    # Validate the data export payload
    serializer = DataExportQuerySerializer(
        data=request.data,
        context={
            "organization": organization,
            "get_projects_by_id": lambda project_query: self._get_projects_by_id(
                project_query, request, organization),
            "get_projects": lambda: self.get_projects(request, organization),
            "has_arithmetic": features.has(
                "organizations:discover-arithmetic", organization,
                actor=request.user),
        },
    )
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)
    data = serializer.validated_data

    try:
        # If this user has sent a request with the same payload and organization,
        # we return them the latest one that is NOT complete (i.e. don't start another)
        query_type = ExportQueryType.from_str(data["query_type"])
        data_export, created = ExportedData.objects.get_or_create(
            organization=organization,
            user=request.user,
            query_type=query_type,
            query_info=data["query_info"],
            date_finished=None,
        )
        status = 200
        if created:
            metrics.incr(
                "dataexport.enqueue",
                tags={"query_type": data["query_type"]},
                sample_rate=1.0)
            assemble_download.delay(
                data_export_id=data_export.id,
                export_limit=limit,
                environment_id=environment_id)
            status = 201
    except ValidationError as e:
        # This will handle invalid JSON requests
        metrics.incr(
            "dataexport.invalid",
            tags={"query_type": data.get("query_type")},
            sample_rate=1.0)
        return Response({"detail": str(e)}, status=400)
    return Response(serialize(data_export, request.user), status=status)
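# Sketch of a request this endpoint accepts, assuming the route and the
# "Issues-by-Tag" query-type string; all identifiers are placeholders.
import requests

resp = requests.post(
    "https://sentry.example.com/api/0/organizations/acme/data-export/",
    headers={"Authorization": "Bearer <auth-token>"},
    json={
        "query_type": "Issues-by-Tag",  # parsed via ExportQueryType.from_str
        "query_info": {"project": [1], "group": 42, "key": "browser"},
    },
)
# 201 = new export task enqueued; 200 = an identical unfinished export exists
assert resp.status_code in (200, 201)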
def post(self, request, organization): """ Create a New Release for an Organization ```````````````````````````````````````` Create a new release for the given Organization. Releases are used by Sentry to improve its error reporting abilities by correlating first seen events with the release that might have introduced the problem. Releases are also necessary for sourcemaps and other debug features that require manual upload for functioning well. :pparam string organization_slug: the slug of the organization the release belongs to. :param string version: a version identifier for this release. Can be a version number, a commit hash etc. :param string ref: an optional commit reference. This is useful if a tagged version has been provided. :param url url: a URL that points to the release. This can be the path to an online interface to the sourcecode for instance. :param array projects: a list of project slugs that are involved in this release :param datetime dateReleased: an optional date that indicates when the release went live. If not provided the current time is assumed. :param array commits: an optional list of commit data to be associated with the release. Commits must include parameters ``id`` (the sha of the commit), and can optionally include ``repository``, ``message``, ``patch_set``, ``author_name``, ``author_email``, and ``timestamp``. :param array refs: an optional way to indicate the start and end commits for each repository included in a release. Head commits must include parameters ``repository`` and ``commit`` (the HEAD sha). They can optionally include ``previousCommit`` (the sha of the HEAD of the previous release), which should be specified if this is the first time you've sent commit data. :auth: required """ serializer = ReleaseSerializerWithProjects(data=request.DATA) if serializer.is_valid(): result = serializer.object allowed_projects = { p.slug: p for p in self.get_allowed_projects(request, organization) } projects = [] for slug in result['projects']: if slug not in allowed_projects: return Response({'projects': ['Invalid project slugs']}, status=400) projects.append(allowed_projects[slug]) # release creation is idempotent to simplify user # experiences try: with transaction.atomic(): release, created = Release.objects.create( organization_id=organization.id, version=result['version'], ref=result.get('ref'), url=result.get('url'), owner=result.get('owner'), date_released=result.get('dateReleased'), ), True except IntegrityError: release, created = Release.objects.get( organization_id=organization.id, version=result['version'], ), False else: release_created.send_robust(release=release, sender=self.__class__) new_projects = [] for project in projects: created = release.add_project(project) if created: new_projects.append(project) if release.date_released: for project in new_projects: Activity.objects.create( type=Activity.RELEASE, project=project, ident=Activity.get_version_ident(result['version']), data={'version': result['version']}, datetime=release.date_released, ) commit_list = result.get('commits') if commit_list: release.set_commits(commit_list) refs = result.get('refs') if not refs: refs = [{ 'repository': r['repository'], 'previousCommit': r.get('previousId'), 'commit': r['currentId'], } for r in result.get('headCommits', [])] if refs: if not request.user.is_authenticated(): return Response( { 'refs': [ 'You must use an authenticated API token to fetch refs' ] }, status=400) fetch_commits = not commit_list try: release.set_refs(refs, request.user, 
fetch=fetch_commits) except InvalidRepository as exc: return Response({'refs': [exc.message]}, status=400) if not created and not new_projects: # This is the closest status code that makes sense, and we want # a unique 2xx response code so people can understand when # behavior differs. # 208 Already Reported (WebDAV; RFC 5842) status = 208 else: status = 201 return Response(serialize(release, request.user), status=status) return Response(serializer.errors, status=400)
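# Hypothetical payload for the release endpoint above, showing the "refs"
# form the docstring describes; the repository name and shas are placeholders.
release_payload = {
    "version": "2.0rc2",
    "projects": ["backend"],
    "refs": [
        {
            "repository": "owner/repo",
            "commit": "deadbeefdeadbeefdeadbeefdeadbeef",        # HEAD sha of this release
            "previousCommit": "cafebabecafebabecafebabecafebabe",  # optional: prior release's HEAD
        },
    ],
}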
def upload_from_request(request, project=None):
    if 'file' not in request.FILES:
        return Response({'detail': 'Missing uploaded file'}, status=400)
    fileobj = request.FILES['file']
    files = create_files_from_macho_zip(fileobj, project=project)
    return Response(serialize(files, request.user), status=201)
def post(self, request, project, version): """ Upload a New File ````````````````` Upload a new file for the given release. Unlike other API requests, files must be uploaded using the traditional multipart/form-data content-type. The optional 'name' attribute should reflect the absolute path that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI. :pparam string organization_slug: the slug of the organization the release belongs to. :pparam string project_slug: the slug of the project to change the release of. :pparam string version: the version identifier of the release. :param string name: the name (full path) of the file. :param string dist: the name of the dist. :param file file: the multipart encoded file. :param string header: this parameter can be supplied multiple times to attach headers to the file. Each header is a string in the format ``key:value``. For instance it can be used to define a content type. :auth: required """ try: release = Release.objects.get( organization_id=project.organization_id, projects=project, version=version, ) except Release.DoesNotExist: raise ResourceDoesNotExist if 'file' not in request.FILES: return Response({'detail': 'Missing uploaded file'}, status=400) fileobj = request.FILES['file'] full_name = request.DATA.get('name', fileobj.name) if not full_name: return Response({'detail': 'File name must be specified'}, status=400) name = full_name.rsplit('/', 1)[-1] if _filename_re.search(name): return Response( { 'detail': 'File name must not contain special whitespace characters' }, status=400) dist_name = request.DATA.get('dist') dist = None if dist_name: dist = release.add_dist(dist_name) headers = { 'Content-Type': fileobj.content_type, } for headerval in request.DATA.getlist('header') or (): try: k, v = headerval.split(':', 1) except ValueError: return Response( {'detail': 'header value was not formatted correctly'}, status=400) else: if _filename_re.search(v): return Response( { 'detail': 'header value must not contain special whitespace characters' }, status=400) headers[k] = v.strip() file = File.objects.create( name=name, type='release.file', headers=headers, ) file.putfile(fileobj) try: with transaction.atomic(): releasefile = ReleaseFile.objects.create( organization_id=release.organization_id, release=release, file=file, name=full_name, dist=dist) except IntegrityError: file.delete() return Response({'detail': ERR_FILE_EXISTS}, status=409) return Response(serialize(releasefile, request.user), status=201)
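# Sketch of the multipart upload the docstring describes; requests builds the
# multipart/form-data body. URL, slugs, version, and token are placeholders.
import requests

with open("app.min.js.map", "rb") as fh:
    resp = requests.post(
        "https://sentry.example.com/api/0/projects/acme/backend/releases/2.0rc2/files/",
        headers={"Authorization": "Bearer <auth-token>"},
        files={"file": fh},
        data={
            "name": "https://example.com/static/app.min.js.map",
            "header": "Content-Type:application/json",  # repeatable key:value pairs
        },
    )
assert resp.status_code == 201  # 409 means the file already exists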
def _serialize_response(self, request, broadcast):
    serializer_cls = self._get_serializer(request)
    return self.respond(
        serialize(broadcast, request.user, serializer=serializer_cls()))
def _serialize_objects(self, items, request):
    serializer_cls = self._get_serializer(request)
    return serialize(items, request.user, serializer=serializer_cls())
def get(self, request):
    return Response(
        serialize([
            e.as_dict()
            for e in sorted(ENHANCEMENT_BASES.values(), key=lambda x: x.id)
        ]))
def put(self, request, project, plugin_id): plugin = self._get_plugin(plugin_id) config = [ serialize_field(project, plugin, c) for c in plugin.get_config( project=project, user=request.user, initial=request.data) ] cleaned = {} errors = {} for field in config: key = field["name"] value = request.data.get(key) if field.get("required") and not value: errors[key] = ERR_FIELD_REQUIRED try: value = plugin.validate_config_field(project=project, name=key, value=value, actor=request.user) except ( forms.ValidationError, serializers.ValidationError, InvalidIdentity, PluginError, ) as e: errors[key] = str(e) if not errors.get(key): cleaned[key] = value if not errors: try: cleaned = plugin.validate_config(project=project, config=cleaned, actor=request.user) except (InvalidIdentity, PluginError) as e: errors["__all__"] = str(e) if errors: return Response({"errors": errors}, status=400) for key, value in cleaned.items(): if value is None: plugin.unset_option(project=project, key=key) else: plugin.set_option(project=project, key=key, value=value) context = serialize(plugin, request.user, PluginWithConfigSerializer(project)) plugin_enabled.send(plugin=plugin, project=project, user=request.user, sender=self) self.create_audit_entry( request=request, organization=project.organization, target_object=project.id, event=AuditLogEntryEvent.INTEGRATION_EDIT, data={ "integration": plugin_id, "project": project.slug }, ) return Response(context)
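# Shapes handled by the plugin-config PUT above, with hypothetical field
# names: the request body maps config keys to values, and failures come back
# under per-field keys or "__all__" with status 400.
request_body = {"api_key": "secret", "team": "ops"}
error_response = {
    "errors": {
        "api_key": "This field is required.",     # per-field validation error
        "__all__": "Could not validate config",   # plugin-level error
    },
}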
def get(self, request): """ List your Projects `````````````````` Return a list of projects available to the authenticated session. :auth: required """ queryset = Project.objects.select_related('organization').distinct() status = request.GET.get('status', 'active') if status == 'active': queryset = queryset.filter( status=ProjectStatus.VISIBLE, ) elif status == 'deleted': queryset = queryset.exclude( status=ProjectStatus.VISIBLE, ) elif status: queryset = queryset.none() if request.auth and not request.user.is_authenticated(): if hasattr(request.auth, 'project'): queryset = queryset.filter( id=request.auth.project_id, ) elif request.auth.organization is not None: queryset = queryset.filter( organization=request.auth.organization.id, ) else: queryset = queryset.none() elif not (is_active_superuser(request) and request.GET.get('show') == 'all'): queryset = queryset.filter( teams__organizationmember__user=request.user, ) query = request.GET.get('query') if query: tokens = tokenize_query(query) for key, value in six.iteritems(tokens): if key == 'query': value = ' '.join(value) queryset = queryset.filter(Q(name__icontains=value) | Q(slug__icontains=value)) elif key == 'slug': queryset = queryset.filter(in_iexact('slug', value)) elif key == 'name': queryset = queryset.filter(in_iexact('name', value)) elif key == 'platform': queryset = queryset.filter( id__in=ProjectPlatform.objects.filter( platform__in=value, ).values('project_id') ) elif key == 'id': queryset = queryset.filter(id__in=value) else: queryset = queryset.none() return self.paginate( request=request, queryset=queryset, order_by='-date_added', on_results=lambda x: serialize(x, request.user, ProjectWithOrganizationSerializer()), paginator_cls=DateTimePaginator, )
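# Hedged sketch of the search grammar handled above: tokenize_query buckets
# key:value pairs, and bare words land under "query". The import path is an
# assumption.
from sentry.search.utils import tokenize_query

tokens = tokenize_query("slug:backend platform:python my app")
# => roughly {"slug": ["backend"], "platform": ["python"], "query": ["my", "app"]}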
def get(self, request, user):
    serialized = serialize(user, request.user, UserNotificationsSerializer())
    return Response(serialized)
def post(self, request, organization): """ Create a New Release for an Organization ```````````````````````````````````````` Create a new release for the given Organization. Releases are used by Sentry to improve its error reporting abilities by correlating first seen events with the release that might have introduced the problem. Releases are also necessary for sourcemaps and other debug features that require manual upload for functioning well. :pparam string organization_slug: the slug of the organization the release belongs to. :param string version: a version identifier for this release. Can be a version number, a commit hash etc. :param string ref: an optional commit reference. This is useful if a tagged version has been provided. :param url url: a URL that points to the release. This can be the path to an online interface to the sourcecode for instance. :param array projects: a list of project slugs that are involved in this release :param datetime dateReleased: an optional date that indicates when the release went live. If not provided the current time is assumed. :param array commits: an optional list of commit data to be associated with the release. Commits must include parameters ``id`` (the sha of the commit), and can optionally include ``repository``, ``message``, ``patch_set``, ``author_name``, ``author_email``, and ``timestamp``. See [release without integration example](/workflow/releases/). :param array refs: an optional way to indicate the start and end commits for each repository included in a release. Head commits must include parameters ``repository`` and ``commit`` (the HEAD sha). They can optionally include ``previousCommit`` (the sha of the HEAD of the previous release), which should be specified if this is the first time you've sent commit data. 
``commit`` may contain a range in the form of ``previousCommit..commit`` :auth: required """ bind_organization_context(organization) serializer = ReleaseSerializerWithProjects(data=request.data) with configure_scope() as scope: if serializer.is_valid(): result = serializer.validated_data scope.set_tag("version", result["version"]) allowed_projects = { p.slug: p for p in self.get_projects(request, organization) } projects = [] for slug in result["projects"]: if slug not in allowed_projects: return Response( {"projects": ["Invalid project slugs"]}, status=400) projects.append(allowed_projects[slug]) new_status = result.get("status") # release creation is idempotent to simplify user # experiences try: release, created = Release.objects.get_or_create( organization_id=organization.id, version=result["version"], defaults={ "ref": result.get("ref"), "url": result.get("url"), "owner": result.get("owner"), "date_released": result.get("dateReleased"), "status": new_status or ReleaseStatus.OPEN, }, ) except IntegrityError: raise ConflictError( "Could not create the release it conflicts with existing data", ) if created: release_created.send_robust(release=release, sender=self.__class__) if not created and new_status is not None and new_status != release.status: release.status = new_status release.save() new_projects = [] for project in projects: created = release.add_project(project) if created: new_projects.append(project) if release.date_released: for project in new_projects: Activity.objects.create( type=Activity.RELEASE, project=project, ident=Activity.get_version_ident( result["version"]), data={"version": result["version"]}, datetime=release.date_released, ) commit_list = result.get("commits") if commit_list: try: release.set_commits(commit_list) except ReleaseCommitError: raise ConflictError( "Release commits are currently being processed") refs = result.get("refs") if not refs: refs = [{ "repository": r["repository"], "previousCommit": r.get("previousId"), "commit": r["currentId"], } for r in result.get("headCommits", [])] scope.set_tag("has_refs", bool(refs)) if refs: if not request.user.is_authenticated(): scope.set_tag("failure_reason", "user_not_authenticated") return Response( { "refs": [ "You must use an authenticated API token to fetch refs" ] }, status=400, ) fetch_commits = not commit_list try: release.set_refs(refs, request.user, fetch=fetch_commits) except InvalidRepository as e: scope.set_tag("failure_reason", "InvalidRepository") return Response({"refs": [six.text_type(e)]}, status=400) if not created and not new_projects: # This is the closest status code that makes sense, and we want # a unique 2xx response code so people can understand when # behavior differs. # 208 Already Reported (WebDAV; RFC 5842) status = 208 else: status = 201 analytics.record( "release.created", user_id=request.user.id if request.user and request.user.id else None, organization_id=organization.id, project_ids=[project.id for project in projects], user_agent=request.META.get("HTTP_USER_AGENT", ""), created_status=status, ) scope.set_tag("success_status", status) return Response(serialize(release, request.user), status=status) scope.set_tag("failure_reason", "serializer_error") return Response(serializer.errors, status=400)
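# The endpoint above is idempotent on (organization, version); a sketch of
# the expected status codes, with placeholder identifiers and an assumed
# "archived" status value.
import requests

url = "https://sentry.example.com/api/0/organizations/acme/releases/"
headers = {"Authorization": "Bearer <auth-token>"}
body = {"version": "2.0rc2", "projects": ["backend"]}

first = requests.post(url, headers=headers, json=body)   # expect 201 Created
second = requests.post(url, headers=headers, json=body)  # expect 208 Already Reported
# A repeated request carrying {"status": "archived"} updates the existing
# release's status instead of creating a new release.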
def update_groups(request, projects, organization_id, search_fn): group_ids = request.GET.getlist('id') if group_ids: group_list = Group.objects.filter( project__organization_id=organization_id, project__in=projects, id__in=group_ids, ) # filter down group ids to only valid matches group_ids = [g.id for g in group_list] if not group_ids: return Response(status=204) else: group_list = None # TODO(jess): We may want to look into refactoring GroupValidator # to support multiple projects, but this is pretty complicated # because of the assignee validation. Punting on this for now. for project in projects: serializer = GroupValidator( data=request.DATA, partial=True, context={'project': project}, ) if not serializer.is_valid(): return Response(serializer.errors, status=400) result = dict(serializer.object) # so we won't have to requery for each group project_lookup = {p.id: p for p in projects} acting_user = request.user if request.user.is_authenticated() else None if not group_ids: try: # bulk mutations are limited to 1000 items # TODO(dcramer): it'd be nice to support more than this, but its # a bit too complicated right now cursor_result, _ = search_fn({ 'limit': 1000, 'paginator_options': { 'max_limit': 1000 }, }) except ValidationError as exc: return Response({'detail': six.text_type(exc)}, status=400) group_list = list(cursor_result) group_ids = [g.id for g in group_list] is_bulk = len(group_ids) > 1 group_project_ids = {g.project_id for g in group_list} # filter projects down to only those that have groups in the search results projects = [p for p in projects if p.id in group_project_ids] queryset = Group.objects.filter(id__in=group_ids, ) discard = result.get('discard') if discard: return handle_discard(request, list(queryset), projects, acting_user) statusDetails = result.pop('statusDetails', result) status = result.get('status') release = None commit = None if status in ('resolved', 'resolvedInNextRelease'): if status == 'resolvedInNextRelease' or statusDetails.get( 'inNextRelease'): # TODO(jess): We may want to support this for multi project, but punting on it for now if len(projects) > 1: return Response( { 'detail': 'Cannot set resolved in next release for multiple projects.' }, status=400) release = statusDetails.get( 'inNextRelease') or Release.objects.filter( projects=projects[0], organization_id=projects[0].organization_id, ).extra(select={ 'sort': 'COALESCE(date_released, date_added)', }).order_by('-sort')[0] activity_type = Activity.SET_RESOLVED_IN_RELEASE activity_data = { # no version yet 'version': '', } status_details = { 'inNextRelease': True, 'actor': serialize(extract_lazy_object(request.user), request.user), } res_type = GroupResolution.Type.in_next_release res_type_str = 'in_next_release' res_status = GroupResolution.Status.pending elif statusDetails.get('inRelease'): # TODO(jess): We could update validation to check if release # applies to multiple projects, but I think we agreed to punt # on this for now if len(projects) > 1: return Response( { 'detail': 'Cannot set resolved in release for multiple projects.' 
}, status=400) release = statusDetails['inRelease'] activity_type = Activity.SET_RESOLVED_IN_RELEASE activity_data = { # no version yet 'version': release.version, } status_details = { 'inRelease': release.version, 'actor': serialize(extract_lazy_object(request.user), request.user), } res_type = GroupResolution.Type.in_release res_type_str = 'in_release' res_status = GroupResolution.Status.resolved elif statusDetails.get('inCommit'): # TODO(jess): Same here, this is probably something we could do, but # punting for now. if len(projects) > 1: return Response( { 'detail': 'Cannot set resolved in commit for multiple projects.' }, status=400) commit = statusDetails['inCommit'] activity_type = Activity.SET_RESOLVED_IN_COMMIT activity_data = { 'commit': commit.id, } status_details = { 'inCommit': serialize(commit, request.user), 'actor': serialize(extract_lazy_object(request.user), request.user), } res_type_str = 'in_commit' else: res_type_str = 'now' activity_type = Activity.SET_RESOLVED activity_data = {} status_details = {} now = timezone.now() metrics.incr('group.resolved', instance=res_type_str, skip_internal=True) # if we've specified a commit, let's see if its already been released # this will allow us to associate the resolution to a release as if we # were simply using 'inRelease' above # Note: this is different than the way commit resolution works on deploy # creation, as a given deploy is connected to an explicit release, and # in this case we're simply choosing the most recent release which contains # the commit. if commit and not release: # TODO(jess): If we support multiple projects for release / commit resolution, # we need to update this to find the release for each project (we shouldn't assume # it's the same) try: release = Release.objects.filter( projects__in=projects, releasecommit__commit=commit, ).extra(select={ 'sort': 'COALESCE(date_released, date_added)', }).order_by('-sort')[0] res_type = GroupResolution.Type.in_release res_status = GroupResolution.Status.resolved except IndexError: release = None for group in group_list: with transaction.atomic(): resolution = None if release: resolution_params = { 'release': release, 'type': res_type, 'status': res_status, 'actor_id': request.user.id if request.user.is_authenticated() else None, } resolution, created = GroupResolution.objects.get_or_create( group=group, defaults=resolution_params, ) if not created: resolution.update(datetime=timezone.now(), **resolution_params) if commit: GroupLink.objects.create( group_id=group.id, project_id=group.project_id, linked_type=GroupLink.LinkedType.commit, relationship=GroupLink.Relationship.resolves, linked_id=commit.id, ) affected = Group.objects.filter(id=group.id, ).update( status=GroupStatus.RESOLVED, resolved_at=now, ) if not resolution: created = affected group.status = GroupStatus.RESOLVED group.resolved_at = now assigned_to = self_subscribe_and_assign_issue( acting_user, group) if assigned_to is not None: result['assignedTo'] = assigned_to if created: activity = Activity.objects.create( project=project_lookup[group.project_id], group=group, type=activity_type, user=acting_user, ident=resolution.id if resolution else None, data=activity_data, ) # TODO(dcramer): we need a solution for activity rollups # before sending notifications on bulk changes if not is_bulk: activity.send_notification() if release: issue_resolved_in_release.send_robust( group=group, project=project_lookup[group.project_id], user=acting_user, resolution_type=res_type_str, sender=update_groups, ) elif commit: 
resolved_with_commit.send_robust( organization_id=organization_id, user=request.user, group=group, sender=update_groups, ) else: issue_resolved.send_robust( project=project_lookup[group.project_id], group=group, user=acting_user, sender=update_groups, ) kick_off_status_syncs.apply_async(kwargs={ 'project_id': group.project_id, 'group_id': group.id, }) result.update({ 'status': 'resolved', 'statusDetails': status_details, }) elif status: new_status = STATUS_CHOICES[result['status']] with transaction.atomic(): happened = queryset.exclude(status=new_status, ).update( status=new_status, ) GroupResolution.objects.filter(group__in=group_ids, ).delete() if new_status == GroupStatus.IGNORED: metrics.incr('group.ignored', skip_internal=True) ignore_duration = (statusDetails.pop('ignoreDuration', None) or statusDetails.pop( 'snoozeDuration', None)) or None ignore_count = statusDetails.pop('ignoreCount', None) or None ignore_window = statusDetails.pop('ignoreWindow', None) or None ignore_user_count = statusDetails.pop('ignoreUserCount', None) or None ignore_user_window = statusDetails.pop('ignoreUserWindow', None) or None if ignore_duration or ignore_count or ignore_user_count: if ignore_duration: ignore_until = timezone.now() + timedelta( minutes=ignore_duration, ) else: ignore_until = None for group in group_list: state = {} if ignore_count and not ignore_window: state['times_seen'] = group.times_seen if ignore_user_count and not ignore_user_window: state['users_seen'] = group.count_users_seen() GroupSnooze.objects.create_or_update( group=group, values={ 'until': ignore_until, 'count': ignore_count, 'window': ignore_window, 'user_count': ignore_user_count, 'user_window': ignore_user_window, 'state': state, 'actor_id': request.user.id if request.user.is_authenticated() else None, }) result['statusDetails'] = { 'ignoreCount': ignore_count, 'ignoreUntil': ignore_until, 'ignoreUserCount': ignore_user_count, 'ignoreUserWindow': ignore_user_window, 'ignoreWindow': ignore_window, 'actor': serialize(extract_lazy_object(request.user), request.user), } else: GroupSnooze.objects.filter(group__in=group_ids, ).delete() ignore_until = None result['statusDetails'] = {} else: result['statusDetails'] = {} if group_list and happened: if new_status == GroupStatus.UNRESOLVED: activity_type = Activity.SET_UNRESOLVED activity_data = {} elif new_status == GroupStatus.IGNORED: activity_type = Activity.SET_IGNORED activity_data = { 'ignoreCount': ignore_count, 'ignoreDuration': ignore_duration, 'ignoreUntil': ignore_until, 'ignoreUserCount': ignore_user_count, 'ignoreUserWindow': ignore_user_window, 'ignoreWindow': ignore_window, } groups_by_project_id = defaultdict(list) for group in group_list: groups_by_project_id[group.project_id].append(group) for project in projects: project_groups = groups_by_project_id.get(project.id) if project_groups: issue_ignored.send_robust(project=project, user=acting_user, group_list=project_groups, activity_data=activity_data, sender=update_groups) for group in group_list: group.status = new_status activity = Activity.objects.create( project=project_lookup[group.project_id], group=group, type=activity_type, user=acting_user, data=activity_data, ) # TODO(dcramer): we need a solution for activity rollups # before sending notifications on bulk changes if not is_bulk: if acting_user: GroupSubscription.objects.subscribe( user=acting_user, group=group, reason=GroupSubscriptionReason.status_change, ) activity.send_notification() if new_status == GroupStatus.UNRESOLVED: 
kick_off_status_syncs.apply_async(kwargs={ 'project_id': group.project_id, 'group_id': group.id, }) if 'assignedTo' in result: assigned_actor = result['assignedTo'] if assigned_actor: for group in group_list: resolved_actor = assigned_actor.resolve() GroupAssignee.objects.assign(group, resolved_actor, acting_user) result['assignedTo'] = serialize(assigned_actor.resolve(), acting_user, ActorSerializer()) else: for group in group_list: GroupAssignee.objects.deassign(group, acting_user) is_member_map = { project.id: project.member_set.filter(user=acting_user).exists() for project in projects } if result.get('hasSeen'): for group in group_list: if is_member_map.get(group.project_id): instance, created = create_or_update( GroupSeen, group=group, user=acting_user, project=project_lookup[group.project_id], values={ 'last_seen': timezone.now(), }) elif result.get('hasSeen') is False: GroupSeen.objects.filter( group__in=group_ids, user=acting_user, ).delete() if result.get('isBookmarked'): for group in group_list: GroupBookmark.objects.get_or_create( project=project_lookup[group.project_id], group=group, user=acting_user, ) GroupSubscription.objects.subscribe( user=acting_user, group=group, reason=GroupSubscriptionReason.bookmark, ) elif result.get('isBookmarked') is False: GroupBookmark.objects.filter( group__in=group_ids, user=acting_user, ).delete() # TODO(dcramer): we could make these more efficient by first # querying for rich rows are present (if N > 2), flipping the flag # on those rows, and then creating the missing rows if result.get('isSubscribed') in (True, False): is_subscribed = result['isSubscribed'] for group in group_list: # NOTE: Subscribing without an initiating event (assignment, # commenting, etc.) clears out the previous subscription reason # to avoid showing confusing messaging as a result of this # action. It'd be jarring to go directly from "you are not # subscribed" to "you were subscribed due since you were # assigned" just by clicking the "subscribe" button (and you # may no longer be assigned to the issue anyway.) GroupSubscription.objects.create_or_update( user=acting_user, group=group, project=project_lookup[group.project_id], values={ 'is_active': is_subscribed, 'reason': GroupSubscriptionReason.unknown, }, ) result['subscriptionDetails'] = { 'reason': SUBSCRIPTION_REASON_MAP.get( GroupSubscriptionReason.unknown, 'unknown', ), } if 'isPublic' in result: # We always want to delete an existing share, because triggering # an isPublic=True even when it's already public, should trigger # regenerating. 
for group in group_list: if GroupShare.objects.filter(group=group).delete(): result['shareId'] = None Activity.objects.create( project=project_lookup[group.project_id], group=group, type=Activity.SET_PRIVATE, user=acting_user, ) if result.get('isPublic'): for group in group_list: share, created = GroupShare.objects.get_or_create( project=project_lookup[group.project_id], group=group, user=acting_user, ) if created: result['shareId'] = share.uuid Activity.objects.create( project=project_lookup[group.project_id], group=group, type=Activity.SET_PUBLIC, user=acting_user, ) # XXX(dcramer): this feels a bit shady like it should be its own # endpoint if result.get('merge') and len(group_list) > 1: # don't allow merging cross project if len(projects) > 1: return Response({ 'detail': 'Merging across multiple projects is not supported' }) group_list_by_times_seen = sorted( group_list, key=lambda g: (g.times_seen, g.id), reverse=True, ) primary_group, groups_to_merge = group_list_by_times_seen[ 0], group_list_by_times_seen[1:] group_ids_to_merge = [g.id for g in groups_to_merge] eventstream_state = eventstream.start_merge(primary_group.project_id, group_ids_to_merge, primary_group.id) Group.objects.filter(id__in=group_ids_to_merge).update( status=GroupStatus.PENDING_MERGE) transaction_id = uuid4().hex merge_groups.delay( from_object_ids=group_ids_to_merge, to_object_id=primary_group.id, transaction_id=transaction_id, eventstream_state=eventstream_state, ) Activity.objects.create( project=project_lookup[primary_group.project_id], group=primary_group, type=Activity.MERGE, user=acting_user, data={ 'issues': [{ 'id': c.id } for c in groups_to_merge], }, ) result['merge'] = { 'parent': six.text_type(primary_group.id), 'children': [six.text_type(g.id) for g in groups_to_merge], } return Response(result)
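# Hedged sketch of a bulk mutation hitting update_groups above: issue ids are
# repeated in the query string and the mutation rides in the JSON body. URL,
# token, and ids are placeholders.
import requests

resp = requests.put(
    "https://sentry.example.com/api/0/organizations/acme/issues/",
    headers={"Authorization": "Bearer <auth-token>"},
    params=[("id", 1), ("id", 2)],  # repeated once per issue
    json={"status": "resolved"},
)
# The response echoes the applied mutation, e.g.:
# {"status": "resolved", "statusDetails": {...}}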
def get_event_payload(self, event):
    return serialize(event)
def put(self, request, project): """ Bulk Mutate a List of Issues ```````````````````````````` Bulk mutate various attributes on issues. The list of issues to modify is given through the `id` query parameter. It is repeated for each issue that should be modified. - For non-status updates, the `id` query parameter is required. - For status updates, the `id` query parameter may be omitted for a batch "update all" query. - An optional `status` query parameter may be used to restrict mutations to only events with the given status. The following attributes can be modified and are supplied as JSON object in the body: If any ids are out of scope this operation will succeed without any data mutation. :qparam int id: a list of IDs of the issues to be mutated. This parameter shall be repeated for each issue. It is optional only if a status is mutated in which case an implicit `update all` is assumed. :qparam string status: optionally limits the query to issues of the specified status. Valid values are ``"resolved"``, ``"unresolved"`` and ``"ignored"``. :pparam string organization_slug: the slug of the organization the issues belong to. :pparam string project_slug: the slug of the project the issues belong to. :param string status: the new status for the issues. Valid values are ``"resolved"``, ``"resolvedInNextRelease"``, ``"unresolved"``, and ``"ignored"``. :param int ignoreDuration: the number of minutes to ignore this issue. :param boolean isPublic: sets the issue to public or private. :param boolean merge: allows to merge or unmerge different issues. :param string assignedTo: the username of the user that should be assigned to this issue. :param boolean hasSeen: in case this API call is invoked with a user context this allows changing of the flag that indicates if the user has seen the event. :param boolean isBookmarked: in case this API call is invoked with a user context this allows changing of the bookmark flag. :auth: required """ group_ids = request.GET.getlist('id') if group_ids: group_list = Group.objects.filter(project=project, id__in=group_ids) # filter down group ids to only valid matches group_ids = [g.id for g in group_list] if not group_ids: return Response(status=204) else: group_list = None serializer = GroupValidator( data=request.DATA, partial=True, context={'project': project}, ) if not serializer.is_valid(): return Response(serializer.errors, status=400) result = dict(serializer.object) acting_user = request.user if request.user.is_authenticated() else None if not group_ids: try: query_kwargs = self._build_query_params_from_request( request, project) except ValidationError as exc: return Response({'detail': six.text_type(exc)}, status=400) # bulk mutations are limited to 1000 items # TODO(dcramer): it'd be nice to support more than this, but its # a bit too complicated right now limit = 1000 query_kwargs['limit'] = limit # the paginator has a default max_limit of 100, which must be overwritten. 
cursor_result = search.query( paginator_options={'max_limit': limit}, **query_kwargs) group_list = list(cursor_result) group_ids = [g.id for g in group_list] is_bulk = len(group_ids) > 1 queryset = Group.objects.filter(id__in=group_ids, ) discard = result.get('discard') if discard: if not features.has( 'projects:discard-groups', project, actor=request.user): return Response( {'detail': ['You do not have that feature enabled']}, status=400) group_list = list(queryset) groups_to_delete = [] for group in group_list: with transaction.atomic(): try: tombstone = GroupTombstone.objects.create( previous_group_id=group.id, actor_id=acting_user.id if acting_user else None, **{ name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP }) except IntegrityError: # in this case, a tombstone has already been created # for a group, so no hash updates are necessary pass else: groups_to_delete.append(group) GroupHash.objects.filter(group=group, ).update( group=None, group_tombstone_id=tombstone.id, ) self._delete_groups(request, project, groups_to_delete) return Response(status=204) statusDetails = result.pop('statusDetails', result) status = result.get('status') if status in ('resolved', 'resolvedInNextRelease'): if status == 'resolvedInNextRelease' or statusDetails.get( 'inNextRelease'): release = Release.objects.filter( projects=project, organization_id=project.organization_id, ).order_by('-date_added')[0] activity_type = Activity.SET_RESOLVED_IN_RELEASE activity_data = { # no version yet 'version': '', } status_details = { 'inNextRelease': True, 'actor': serialize(extract_lazy_object(request.user), request.user), } res_type = GroupResolution.Type.in_next_release res_status = GroupResolution.Status.pending elif statusDetails.get('inRelease'): release = statusDetails['inRelease'] activity_type = Activity.SET_RESOLVED_IN_RELEASE activity_data = { # no version yet 'version': release.version, } status_details = { 'inRelease': release.version, 'actor': serialize(extract_lazy_object(request.user), request.user), } res_type = GroupResolution.Type.in_release res_status = GroupResolution.Status.resolved else: release = None activity_type = Activity.SET_RESOLVED activity_data = {} status_details = {} now = timezone.now() for group in group_list: with transaction.atomic(): if release: resolution_params = { 'release': release, 'type': res_type, 'status': res_status, 'actor_id': request.user.id if request.user.is_authenticated() else None, } resolution, created = GroupResolution.objects.get_or_create( group=group, defaults=resolution_params, ) if not created: resolution.update(datetime=timezone.now(), **resolution_params) else: resolution = None affected = Group.objects.filter(id=group.id, ).update( status=GroupStatus.RESOLVED, resolved_at=now, ) if not resolution: created = affected group.status = GroupStatus.RESOLVED group.resolved_at = now self._subscribe_and_assign_issue(acting_user, group, result) if created: activity = Activity.objects.create( project=group.project, group=group, type=activity_type, user=acting_user, ident=resolution.id if resolution else None, data=activity_data, ) # TODO(dcramer): we need a solution for activity rollups # before sending notifications on bulk changes if not is_bulk: activity.send_notification() issue_resolved_in_release.send( group=group, project=project, sender=acting_user, ) result.update({ 'status': 'resolved', 'statusDetails': status_details, }) elif status: new_status = STATUS_CHOICES[result['status']] with transaction.atomic(): happened = 
queryset.exclude(status=new_status, ).update( status=new_status, ) GroupResolution.objects.filter(group__in=group_ids, ).delete() if new_status == GroupStatus.IGNORED: ignore_duration = ( statusDetails.pop('ignoreDuration', None) or statusDetails.pop('snoozeDuration', None)) or None ignore_count = statusDetails.pop('ignoreCount', None) or None ignore_window = statusDetails.pop('ignoreWindow', None) or None ignore_user_count = statusDetails.pop( 'ignoreUserCount', None) or None ignore_user_window = statusDetails.pop( 'ignoreUserWindow', None) or None if ignore_duration or ignore_count or ignore_user_count: if ignore_duration: ignore_until = timezone.now() + timedelta( minutes=ignore_duration, ) else: ignore_until = None for group in group_list: state = {} if ignore_count and not ignore_window: state['times_seen'] = group.times_seen if ignore_user_count and not ignore_user_window: state['users_seen'] = group.count_users_seen() GroupSnooze.objects.create_or_update( group=group, values={ 'until': ignore_until, 'count': ignore_count, 'window': ignore_window, 'user_count': ignore_user_count, 'user_window': ignore_user_window, 'state': state, 'actor_id': request.user.id if request.user.is_authenticated() else None, }) result['statusDetails'] = { 'ignoreCount': ignore_count, 'ignoreUntil': ignore_until, 'ignoreUserCount': ignore_user_count, 'ignoreUserWindow': ignore_user_window, 'ignoreWindow': ignore_window, 'actor': serialize(extract_lazy_object(request.user), request.user), } else: GroupSnooze.objects.filter( group__in=group_ids, ).delete() ignore_until = None result['statusDetails'] = {} else: result['statusDetails'] = {} if group_list and happened: if new_status == GroupStatus.UNRESOLVED: activity_type = Activity.SET_UNRESOLVED activity_data = {} elif new_status == GroupStatus.IGNORED: activity_type = Activity.SET_IGNORED activity_data = { 'ignoreCount': ignore_count, 'ignoreDuration': ignore_duration, 'ignoreUntil': ignore_until, 'ignoreUserCount': ignore_user_count, 'ignoreUserWindow': ignore_user_window, 'ignoreWindow': ignore_window, } for group in group_list: group.status = new_status activity = Activity.objects.create( project=group.project, group=group, type=activity_type, user=acting_user, data=activity_data, ) # TODO(dcramer): we need a solution for activity rollups # before sending notifications on bulk changes if not is_bulk: if acting_user: GroupSubscription.objects.subscribe( user=acting_user, group=group, reason=GroupSubscriptionReason.status_change, ) activity.send_notification() if 'assignedTo' in result: if result['assignedTo']: for group in group_list: GroupAssignee.objects.assign(group, result['assignedTo'], acting_user) if 'isSubscribed' not in result or result[ 'assignedTo'] != request.user: GroupSubscription.objects.subscribe( group=group, user=result['assignedTo'], reason=GroupSubscriptionReason.assigned, ) result['assignedTo'] = serialize(result['assignedTo']) else: for group in group_list: GroupAssignee.objects.deassign(group, acting_user) if result.get('hasSeen') and project.member_set.filter( user=acting_user).exists(): for group in group_list: instance, created = create_or_update(GroupSeen, group=group, user=acting_user, project=group.project, values={ 'last_seen': timezone.now(), }) elif result.get('hasSeen') is False: GroupSeen.objects.filter( group__in=group_ids, user=acting_user, ).delete() if result.get('isBookmarked'): for group in group_list: GroupBookmark.objects.get_or_create( project=project, group=group, user=acting_user, ) 
GroupSubscription.objects.subscribe( user=acting_user, group=group, reason=GroupSubscriptionReason.bookmark, ) elif result.get('isBookmarked') is False: GroupBookmark.objects.filter( group__in=group_ids, user=acting_user, ).delete() # TODO(dcramer): we could make these more efficient by first # querying for rich rows are present (if N > 2), flipping the flag # on those rows, and then creating the missing rows if result.get('isSubscribed') in (True, False): is_subscribed = result['isSubscribed'] for group in group_list: # NOTE: Subscribing without an initiating event (assignment, # commenting, etc.) clears out the previous subscription reason # to avoid showing confusing messaging as a result of this # action. It'd be jarring to go directly from "you are not # subscribed" to "you were subscribed due since you were # assigned" just by clicking the "subscribe" button (and you # may no longer be assigned to the issue anyway.) GroupSubscription.objects.create_or_update( user=acting_user, group=group, project=project, values={ 'is_active': is_subscribed, 'reason': GroupSubscriptionReason.unknown, }, ) result['subscriptionDetails'] = { 'reason': SUBSCRIPTION_REASON_MAP.get( GroupSubscriptionReason.unknown, 'unknown', ), } if 'isPublic' in result: # We always want to delete an existing share, because triggering # an isPublic=True even when it's already public, should trigger # regenerating. for group in group_list: if GroupShare.objects.filter(group=group).delete(): result['shareId'] = None Activity.objects.create( project=group.project, group=group, type=Activity.SET_PRIVATE, user=acting_user, ) if result.get('isPublic'): for group in group_list: share, created = GroupShare.objects.get_or_create( project=group.project, group=group, user=acting_user, ) if created: result['shareId'] = share.uuid Activity.objects.create( project=group.project, group=group, type=Activity.SET_PUBLIC, user=acting_user, ) # XXX(dcramer): this feels a bit shady like it should be its own # endpoint if result.get('merge') and len(group_list) > 1: primary_group = sorted(group_list, key=lambda x: -x.times_seen)[0] children = [] transaction_id = uuid4().hex for group in group_list: if group == primary_group: continue children.append(group) group.update(status=GroupStatus.PENDING_MERGE) merge_group.delay( from_object_id=group.id, to_object_id=primary_group.id, transaction_id=transaction_id, ) Activity.objects.create( project=primary_group.project, group=primary_group, type=Activity.MERGE, user=acting_user, data={ 'issues': [{ 'id': c.id } for c in children], }, ) result['merge'] = { 'parent': six.text_type(primary_group.id), 'children': [six.text_type(g.id) for g in children], } return Response(result)
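# Merge semantics of the project-scoped endpoint above, sketched with
# placeholder ids: the group with the highest times_seen becomes the parent.
import requests

resp = requests.put(
    "https://sentry.example.com/api/0/projects/acme/backend/issues/",
    headers={"Authorization": "Bearer <auth-token>"},
    params=[("id", 1), ("id", 2), ("id", 3)],
    json={"merge": True},
)
# => {"merge": {"parent": "2", "children": ["1", "3"]}, ...}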
def _finish_pipeline(self, data): if "reinstall_id" in data: self.integration = Integration.objects.get( provider=self.provider.integration_key, id=data["reinstall_id"]) self.integration.update(external_id=data["external_id"], status=ObjectStatus.VISIBLE) self.integration.get_installation(self.organization.id).reinstall() elif "expect_exists" in data: self.integration = Integration.objects.get( provider=self.provider.integration_key, external_id=data["external_id"]) else: self.integration = ensure_integration( self.provider.integration_key, data) # Does this integration provide a user identity for the user setting up # the integration? identity = data.get("user_identity") if identity: # Some identity providers may not be directly associated to the # external integration. Integrations may specify the external_id to # be used for the idp. idp_external_id = data.get("idp_external_id", data["external_id"]) idp_config = data.get("idp_config", {}) # Create identity provider for this integration if necessary idp, created = IdentityProvider.objects.get_or_create( external_id=idp_external_id, type=identity["type"], defaults={"config": idp_config}) if not created: idp.update(config=idp_config) identity_data = { "status": IdentityStatus.VALID, "scopes": identity["scopes"], "data": identity["data"], "date_verified": timezone.now(), } try: identity_model, created = Identity.objects.get_or_create( idp=idp, user=self.request.user, external_id=identity["external_id"], defaults=identity_data, ) if not created: identity_model.update(**identity_data) except IntegrityError: # If the external_id is already used for a different user or # the user already has a different external_id remove those # identities and recreate it, except in the case of Identities # being used for login. if idp.type in ("github", "vsts", "google"): try: other_identity = Identity.objects.get( idp=idp, external_id=identity["external_id"]) except Identity.DoesNotExist: # The user is linked to a different external_id. It's ok to relink # here because they'll still be able to log in with the new external_id. pass else: # The external_id is linked to a different user. If that user doesn't # have a password, we don't delete the link as it may lock them out. if not other_identity.user.has_usable_password(): proper_name = idp.get_provider().name return self._dialog_response( { "error": _("The provided %s account is linked to a different Sentry user. " "To continue linking the current Sentry user, please use a different %s account." ) % (proper_name, proper_name) }, False, ) identity_model = Identity.reattach(idp, identity["external_id"], self.request.user, identity_data) default_auth_id = None if self.provider.needs_default_identity: if not (identity and identity_model): raise NotImplementedError("Integration requires an identity") default_auth_id = identity_model.id org_integration = self.integration.add_organization( self.organization, self.request.user, default_auth_id=default_auth_id) return self._dialog_response( serialize(org_integration, self.request.user), True)
def post(self, request, organization):
    """
    Create a New Team
    `````````````````

    Create a new team bound to an organization. Only the name of the
    team is needed to create it, the slug can be auto-generated.

    :pparam string organization_slug: the slug of the organization the
                                      team should be created for.
    :param string name: the optional name of the team.
    :param string slug: the optional slug for this team. If not provided
                        it will be auto-generated from the name.
    :auth: required
    """
    serializer = TeamSerializer(data=request.DATA)
    if serializer.is_valid():
        result = serializer.object

        try:
            with transaction.atomic():
                team = Team.objects.create(
                    name=result.get('name') or result['slug'],
                    slug=result.get('slug'),
                    organization=organization,
                )
        except IntegrityError:
            return Response(
                {
                    'non_field_errors': [CONFLICTING_SLUG_ERROR],
                    'detail': CONFLICTING_SLUG_ERROR,
                },
                status=409,
            )
        else:
            team_created.send_robust(
                organization=organization,
                user=request.user,
                team=team,
                sender=self.__class__,
            )

        if request.user.is_authenticated():
            try:
                member = OrganizationMember.objects.get(
                    user=request.user,
                    organization=organization,
                )
            except OrganizationMember.DoesNotExist:
                pass
            else:
                OrganizationMemberTeam.objects.create(
                    team=team,
                    organizationmember=member,
                )

        self.create_audit_entry(
            request=request,
            organization=organization,
            target_object=team.id,
            event=AuditLogEntryEvent.TEAM_ADD,
            data=team.get_audit_log_data(),
        )

        return Response(serialize(team, request.user), status=201)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
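# Sketch of creating a team via the endpoint above; a slug that already
# exists in the organization trips the IntegrityError branch and yields 409.
# URL, slugs, and token are placeholders.
import requests

resp = requests.post(
    "https://sentry.example.com/api/0/organizations/acme/teams/",
    headers={"Authorization": "Bearer <auth-token>"},
    json={"name": "Platform", "slug": "platform"},  # both optional per the docstring
)
assert resp.status_code in (201, 409)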