class File(Model):
    """A stored file whose bytes live in a shared, deduplicated FileBlob.

    Legacy rows stored the storage backend details directly on this model
    (the fields between the <Legacy fields> markers); ``ensure_blob``
    migrates such rows to the newer ``FileBlob`` indirection on demand.
    """
    __core__ = False

    name = models.CharField(max_length=128)
    type = models.CharField(max_length=64)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    headers = JSONField()
    blob = FlexibleForeignKey('sentry.FileBlob', null=True)

    # <Legacy fields>
    storage = models.CharField(max_length=128, null=True)
    storage_options = JSONField()
    path = models.TextField(null=True)
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, null=True)
    # </Legacy fields>

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_file'

    def delete(self, *args, **kwargs):
        # Delete this row first, then garbage-collect the blob if no other
        # File row still references it (blobs are shared by checksum).
        super(File, self).delete(*args, **kwargs)
        if self.blob and not File.objects.filter(blob=self.blob).exists():
            self.blob.delete()

    def ensure_blob(self):
        """Lazily migrate a legacy row to a FileBlob; no-op when already migrated.

        Guarded by a distributed lock keyed on the checksum so concurrent
        workers converting files with the same content do not race and
        create duplicate blobs.
        """
        if self.blob:
            return

        lock_key = 'fileblob:convert:{}'.format(self.checksum)
        with Lock(lock_key, timeout=60):
            # Reuse an existing blob with the same checksum; otherwise seed a
            # new one from this row's legacy storage fields.
            blob, created = FileBlob.objects.get_or_create(
                checksum=self.checksum,
                defaults={
                    'storage': self.storage,
                    'storage_options': self.storage_options,
                    'path': self.path,
                    'size': self.size,
                    'timestamp': self.timestamp,
                },
            )

            # if this blob already existed, lets kill the duplicate
            # TODO(dcramer): kill data when fully migrated
            # if self.path != blob.path:
            #     get_storage_class(self.storage)(
            #         **self.storage_options
            #     ).delete(self.path)

            self.update(
                blob=blob,
                # TODO(dcramer): kill data when fully migrated
                # checksum=None,
                # path=None,
                # storage=None,
                # storage_options={},
            )

    def getfile(self, *args, **kwargs):
        # Force migration to blob storage before delegating to the blob.
        self.ensure_blob()
        return self.blob.getfile(*args, **kwargs)
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.MUTED, _('Muted')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey('sentry.Release', null=True)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    redmine_id = models.CharField(max_length=8)  # add by hzwangzhiwei @20160411
    blame_info = models.TextField()  # add by hzwangzhiwei @20160612
    follower = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True)  # add by hzwangzhiwei @20160824

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (
            ("can_view", "Can view"),
        )
        index_together = (
            ('project', 'first_release'),
        )

    __repr__ = sane_repr('project_id', 'follower_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill timestamps so last_seen/first_seen/active_at are always
        # populated on insert (active_at defaults to first_seen).
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen

        if self.message:
            # We limit what we store for the message body
            self.message = self.message.splitlines()[0][:255]

        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(reverse('sentry-group', args=[
            self.organization.slug, self.project.slug, self.id]))

    @property
    def avg_time_spent(self):
        # Returns None (implicitly) when no samples were recorded.
        if not self.time_spent_count:
            return
        return float(self.time_spent_total) / self.time_spent_count

    def is_over_resolve_age(self):
        """True when the group has not been seen within the project's
        'sentry:resolve_age' window (expressed in hours)."""
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(hours=int(resolve_age))

    def is_muted(self):
        return self.get_status() == GroupStatus.MUTED

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Effective status, accounting for expired snoozes and auto-resolve."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        if self.status == GroupStatus.MUTED:
            try:
                snooze = GroupSnooze.objects.get(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                # XXX(dcramer): if the snooze row exists then we need
                # to confirm its still valid
                if snooze.until > timezone.now():
                    return GroupStatus.MUTED
                else:
                    return GroupStatus.UNRESOLVED

        if self.status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return self.status

    def get_share_id(self):
        # Opaque public identifier: base16("<project_id>.<group_id>"), lowered.
        # NOTE(review): b16encode expects bytes on Python 3; this call passes a
        # str, which works on Python 2 only -- confirm target runtime.
        return b16encode('{}.{}'.format(self.project_id, self.id)).lower()

    @classmethod
    def from_share_id(cls, share_id):
        """Inverse of get_share_id; raises cls.DoesNotExist on malformed input."""
        try:
            project_id, group_id = b16decode(share_id.upper()).split('.')
        except ValueError:
            raise cls.DoesNotExist

        return cls.objects.get(project=project_id, id=group_id)

    def get_score(self):
        # Ranking score: log-scaled frequency plus recency (epoch seconds).
        return int(math.log(self.times_seen) * 600 + float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        from sentry.models import Event

        if not hasattr(self, '_latest_event'):
            # Fetch a small candidate window by datetime, then break ties with
            # EVENT_ORDERING_KEY for a stable "latest" choice.
            latest_events = sorted(
                Event.objects.filter(
                    group=self,
                ).order_by('-datetime')[0:5],
                key=EVENT_ORDERING_KEY,
                reverse=True,
            )
            try:
                self._latest_event = latest_events[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_oldest_event(self):
        from sentry.models import Event

        if not hasattr(self, '_oldest_event'):
            # Mirror of get_latest_event for the oldest end of the range.
            oldest_events = sorted(
                Event.objects.filter(
                    group=self,
                ).order_by('datetime')[0:5],
                key=EVENT_ORDERING_KEY,
            )
            try:
                self._oldest_event = oldest_events[0]
            except IndexError:
                self._oldest_event = None
        return self._oldest_event

    def get_unique_tags(self, tag, since=None, order_by='-times_seen'):
        """Return (value, times_seen, first_seen, last_seen) tuples for a tag key."""
        # TODO(dcramer): this has zero test coverage and is a critical path
        from sentry.models import GroupTagValue

        queryset = GroupTagValue.objects.filter(
            group=self,
            key=tag,
        )

        if since:
            queryset = queryset.filter(last_seen__gte=since)

        return queryset.values_list(
            'value',
            'times_seen',
            'first_seen',
            'last_seen',
        ).order_by(order_by)

    def get_tags(self, with_internal=True):
        """Return cached [{'key', 'label'}] entries for this group's tag keys,
        optionally excluding internal 'sentry:'-prefixed keys."""
        from sentry.models import GroupTagKey, TagKey

        if not hasattr(self, '_tag_cache'):
            group_tags = GroupTagKey.objects.filter(
                group=self,
                project=self.project,
            )
            if not with_internal:
                group_tags = group_tags.exclude(key__startswith='sentry:')

            group_tags = list(group_tags.values_list('key', flat=True))

            tag_keys = dict(
                (t.key, t)
                for t in TagKey.objects.filter(
                    project=self.project,
                    key__in=group_tags
                )
            )

            results = []
            for key in group_tags:
                try:
                    tag_key = tag_keys[key]
                except KeyError:
                    # No TagKey row: derive a human label from the raw key.
                    label = key.replace('_', ' ').title()
                else:
                    label = tag_key.get_label()

                results.append({
                    'key': key,
                    'label': label,
                })

            self._tag_cache = sorted(results, key=lambda x: x['label'])

        return self._tag_cache

    def error(self):
        return self.message
    error.short_description = _('error')

    def has_two_part_message(self):
        # True when the message needs a separate "details" rendering.
        message = strip(self.message)
        return '\n' in message or len(message) > 100

    @property
    def title(self):
        culprit = strip(self.culprit)
        if culprit:
            return culprit
        return self.message

    @property
    def message_short(self):
        # First line of the message, truncated for list views.
        message = strip(self.message)
        if not message:
            message = '<unlabeled message>'
        else:
            message = truncatechars(message.splitlines()[0], 100)
        return message

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '[%s] %s: %s' % (
            self.project.get_full_name().encode('utf-8'),
            six.text_type(self.get_level_display()).upper().encode('utf-8'),
            self.message_short.encode('utf-8')
        )
class Project(Model):
    """
    Projects are permission based namespaces which generally
    are the top level entry point for all data.
    """
    PLATFORM_CHOICES = tuple(
        (p, PLATFORM_TITLES.get(p, p.title()))
        for p in PLATFORM_LIST
    ) + (('other', 'Other'),)

    slug = models.SlugField(null=True)
    name = models.CharField(max_length=200)
    organization = FlexibleForeignKey('sentry.Organization')
    team = FlexibleForeignKey('sentry.Team')
    public = models.BooleanField(default=False)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (ProjectStatus.VISIBLE, _('Active')),
        (ProjectStatus.PENDING_DELETION, _('Pending Deletion')),
        (ProjectStatus.DELETION_IN_PROGRESS, _('Deletion in Progress')),
    ), db_index=True)
    platform = models.CharField(max_length=32, choices=PLATFORM_CHOICES, null=True)

    objects = ProjectManager(cache_fields=[
        'pk',
        'slug',
    ])

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_project'
        unique_together = (('team', 'slug'), ('organization', 'slug'))

    __repr__ = sane_repr('team_id', 'slug')

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def save(self, *args, **kwargs):
        # Auto-derive a slug from the name on first save.
        if not self.slug:
            slugify_instance(self, self.name, organization=self.organization)
        super(Project, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(reverse('sentry-stream', args=[
            self.organization.slug, self.slug]))

    def merge_to(self, project):
        """Move this project's groups, events and tag data into *project*,
        then delete this project. Accepts a Project instance or a pk."""
        from sentry.models import (
            Group, GroupTagValue, Event, TagValue
        )

        if not isinstance(project, Project):
            project = Project.objects.get_from_cache(pk=project)

        for group in Group.objects.filter(project=self):
            try:
                # NOTE(review): this lookup filters on project only, so it can
                # raise an uncaught MultipleObjectsReturned or match an
                # arbitrary group in the target project. It presumably needs an
                # additional discriminator identifying the matching group --
                # verify against the intended merge semantics.
                other = Group.objects.get(
                    project=project,
                )
            except Group.DoesNotExist:
                # No counterpart in the target: re-home the group wholesale.
                group.update(project=project)
                for model in (Event, GroupTagValue):
                    model.objects.filter(project=self, group=group).update(project=project)
            else:
                # Counterpart exists: fold events and tag counts into it.
                Event.objects.filter(group=group).update(group=other)

                for obj in GroupTagValue.objects.filter(group=group):
                    # NOTE(review): this creates/updates rows against ``group``
                    # even though the events were just moved to ``other`` --
                    # confirm whether ``group=other`` was intended here.
                    obj2, created = GroupTagValue.objects.get_or_create(
                        project=project,
                        group=group,
                        key=obj.key,
                        value=obj.value,
                        defaults={'times_seen': obj.times_seen}
                    )
                    if not created:
                        obj2.update(times_seen=F('times_seen') + obj.times_seen)

        for fv in TagValue.objects.filter(project=self):
            TagValue.objects.get_or_create(project=project, key=fv.key, value=fv.value)
            fv.delete()
        self.delete()

    def is_internal_project(self):
        # True when this project is one of the instance's own bookkeeping
        # projects (frontend or internal error reporting).
        for value in (settings.SENTRY_FRONTEND_PROJECT, settings.SENTRY_PROJECT):
            if str(self.id) == str(value) or str(self.slug) == str(value):
                return True
        return False

    def get_tags(self, with_internal=True):
        """Cached list of tag keys for the project; falls back to all known
        keys when the 'tags' project option is unset."""
        from sentry.models import TagKey

        if not hasattr(self, '_tag_cache'):
            tags = self.get_option('tags', None)
            if tags is None:
                tags = [
                    t for t in TagKey.objects.all_keys(self)
                    if with_internal or not t.startswith('sentry:')
                ]
            self._tag_cache = tags
        return self._tag_cache

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import ProjectOption

        return ProjectOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import ProjectOption

        return ProjectOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import ProjectOption

        return ProjectOption.objects.unset_value(self, *args, **kwargs)

    @property
    def member_set(self):
        # Active org members on this project's team (or with global access),
        # excluding memberships explicitly deactivated for the team.
        from sentry.models import OrganizationMember

        return self.organization.member_set.filter(
            Q(organizationmemberteam__team=self.team) | Q(has_global_access=True),
            user__is_active=True,
        ).exclude(
            id__in=OrganizationMember.objects.filter(
                organizationmemberteam__is_active=False,
                organizationmemberteam__team=self.team,
            ).values('id')
        ).distinct()

    def has_access(self, user, access=None):
        """Deprecated membership check; also validates any SSO identity."""
        from sentry.models import AuthIdentity, OrganizationMember

        warnings.warn('Project.has_access is deprecated.', DeprecationWarning)

        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        try:
            member = queryset.get()
        except OrganizationMember.DoesNotExist:
            return False

        try:
            auth_identity = AuthIdentity.objects.get(
                auth_provider__organization=self.organization_id,
                user=member.user_id,
            )
        except AuthIdentity.DoesNotExist:
            # No SSO configured for this member: membership alone suffices.
            return True

        return auth_identity.is_valid(member)

    def get_audit_log_data(self):
        return {
            'slug': self.slug,
            'name': self.name,
            'status': self.status,
            'public': self.public,
            'platform': self.platform,
        }

    def get_full_name(self):
        # Prefix with the team name unless the project name already contains it.
        if self.team.name not in self.name:
            return '%s %s' % (self.team.name, self.name)
        return self.name
class ProjectKey(Model):
    """A DSN key pair (public/secret) used by SDKs to send events to a project."""
    __core__ = True

    project = FlexibleForeignKey("sentry.Project", related_name="key_set")
    label = models.CharField(max_length=64, blank=True, null=True)
    public_key = models.CharField(max_length=32, unique=True, null=True)
    secret_key = models.CharField(max_length=32, unique=True, null=True)
    roles = BitField(
        flags=(
            # access to post events to the store endpoint
            ("store", "Event API access"),
            # read/write access to rest API
            ("api", "Web API access"),
        ),
        default=["store"],
    )
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ProjectKeyStatus.ACTIVE, _("Active")),
            (ProjectKeyStatus.INACTIVE, _("Inactive")),
        ),
        db_index=True,
    )
    date_added = models.DateTimeField(default=timezone.now, null=True)
    rate_limit_count = BoundedPositiveIntegerField(null=True)
    rate_limit_window = BoundedPositiveIntegerField(null=True)

    objects = ProjectKeyManager(
        cache_fields=("public_key", "secret_key"),
        # store projectkeys in memcached for longer than other models,
        # specifically to make the relay_projectconfig endpoint faster.
        cache_ttl=60 * 30,
    )

    data = JSONField()

    # support legacy project keys in API
    scopes = (
        "project:read",
        "project:write",
        "project:admin",
        "project:releases",
        "event:read",
        "event:write",
        "event:admin",
    )

    class Meta:
        app_label = "sentry"
        db_table = "sentry_projectkey"

    __repr__ = sane_repr("project_id", "public_key")

    def __str__(self):
        return str(self.public_key)

    @classmethod
    def generate_api_key(cls):
        return uuid4().hex

    @classmethod
    def looks_like_api_key(cls, key):
        return bool(_uuid4_re.match(key))

    @classmethod
    def from_dsn(cls, dsn):
        """Resolve the ProjectKey identified by a DSN string.

        Raises ProjectKey.DoesNotExist when no key matches or the DSN's
        project id is malformed.
        """
        urlparts = urlparse(dsn)

        public_key = urlparts.username
        project_id = urlparts.path.rsplit("/", 1)[-1]

        try:
            return ProjectKey.objects.get(public_key=public_key, project=project_id)
        except ValueError:
            # ValueError would come from a non-integer project_id,
            # which is obviously a DoesNotExist. We catch and rethrow this
            # so anything downstream expecting DoesNotExist works fine
            raise ProjectKey.DoesNotExist(
                "ProjectKey matching query does not exist.")

    @classmethod
    def get_default(cls, project):
        """First active key on the project that carries the 'store' role."""
        return cls.objects.filter(
            project=project,
            roles=models.F("roles").bitor(cls.roles.store),
            status=ProjectKeyStatus.ACTIVE,
        ).first()

    @property
    def is_active(self):
        return self.status == ProjectKeyStatus.ACTIVE

    @property
    def rate_limit(self):
        # (count, window) pair; (0, 0) means no rate limit configured.
        if self.rate_limit_count and self.rate_limit_window:
            return (self.rate_limit_count, self.rate_limit_window)
        return (0, 0)

    def save(self, *args, **kwargs):
        # Generate key material and a friendly label on first save.
        if not self.public_key:
            self.public_key = ProjectKey.generate_api_key()
        if not self.secret_key:
            self.secret_key = ProjectKey.generate_api_key()
        if not self.label:
            self.label = petname.Generate(2, " ", letters=10).title()
        super().save(*args, **kwargs)

    def get_dsn(self, domain=None, secure=True, public=False):
        """Build the DSN string; empty string when no endpoint is configured."""
        urlparts = urlparse(self.get_endpoint(public=public))

        if not public:
            key = "%s:%s" % (self.public_key, self.secret_key)
        else:
            key = self.public_key

        # If we do not have a scheme or domain/hostname, dsn is never valid
        if not urlparts.netloc or not urlparts.scheme:
            return ""

        return "%s://%s@%s/%s" % (
            urlparts.scheme,
            key,
            urlparts.netloc + urlparts.path,
            self.project_id,
        )

    @property
    def organization_id(self):
        return self.project.organization_id

    @property
    def organization(self):
        return self.project.organization

    @property
    def dsn_private(self):
        return self.get_dsn(public=False)

    @property
    def dsn_public(self):
        return self.get_dsn(public=True)

    @property
    def csp_endpoint(self):
        endpoint = self.get_endpoint()
        return "%s/api/%s/csp-report/?sentry_key=%s" % (
            endpoint, self.project_id, self.public_key)

    @property
    def security_endpoint(self):
        endpoint = self.get_endpoint()
        return "%s/api/%s/security/?sentry_key=%s" % (
            endpoint, self.project_id, self.public_key)

    @property
    def minidump_endpoint(self):
        endpoint = self.get_endpoint()
        return "%s/api/%s/minidump/?sentry_key=%s" % (
            endpoint, self.project_id, self.public_key)

    @property
    def unreal_endpoint(self):
        return "%s/api/%s/unreal/%s/" % (self.get_endpoint(), self.project_id, self.public_key)

    @property
    def js_sdk_loader_cdn_url(self):
        # Prefer the configured CDN; fall back to serving the loader ourselves.
        if settings.JS_SDK_LOADER_CDN_URL:
            return "%s%s.min.js" % (settings.JS_SDK_LOADER_CDN_URL, self.public_key)
        else:
            endpoint = self.get_endpoint()
            return "%s%s" % (
                endpoint,
                reverse("sentry-js-sdk-loader", args=[self.public_key, ".min"]),
            )

    def get_endpoint(self, public=True):
        """Base URL events are sent to, with optional per-org subdomain."""
        if public:
            endpoint = settings.SENTRY_PUBLIC_ENDPOINT or settings.SENTRY_ENDPOINT
        else:
            endpoint = settings.SENTRY_ENDPOINT

        if not endpoint:
            endpoint = options.get("system.url-prefix")

        if features.has("organizations:org-subdomains", self.project.organization):
            urlparts = urlparse(endpoint)
            if urlparts.scheme and urlparts.netloc:
                endpoint = "%s://%s.%s%s" % (
                    urlparts.scheme,
                    settings.SENTRY_ORG_SUBDOMAIN_TEMPLATE.format(
                        organization_id=self.project.organization_id),
                    urlparts.netloc,
                    urlparts.path,
                )

        return endpoint

    def get_allowed_origins(self):
        from sentry.utils.http import get_origins

        return get_origins(self.project)

    def get_audit_log_data(self):
        return {
            "label": self.label,
            "public_key": self.public_key,
            "secret_key": self.secret_key,
            "roles": int(self.roles),
            "status": self.status,
            "rate_limit_count": self.rate_limit_count,
            "rate_limit_window": self.rate_limit_window,
        }

    def get_scopes(self):
        return self.scopes
class EventUser(Model):
    """A deduplicated end-user seen in events, keyed by a hash of the first
    available identifying attribute (ident/username/email/ip)."""
    __core__ = False

    project_id = BoundedPositiveIntegerField(db_index=True)
    hash = models.CharField(max_length=32)
    ident = models.CharField(max_length=128, null=True)
    email = models.EmailField(null=True, max_length=MAX_EMAIL_FIELD_LENGTH)
    username = models.CharField(max_length=128, null=True)
    name = models.CharField(max_length=128, null=True)
    ip_address = models.GenericIPAddressField(null=True)
    date_added = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_eventuser"
        unique_together = (("project_id", "ident"), ("project_id", "hash"))
        index_together = (
            ("project_id", "email"),
            ("project_id", "username"),
            ("project_id", "ip_address"),
        )

    __repr__ = sane_repr("project_id", "ident", "email", "username", "ip_address")

    @classmethod
    def attr_from_keyword(cls, keyword):
        # Reverse lookup: tag keyword (e.g. 'email') -> model attribute name.
        return KEYWORD_MAP.get_key(keyword)

    @classmethod
    def hash_from_tag(cls, value):
        # Tag values look like '<keyword>:<value>'; only the value is hashed.
        return md5_text(value.split(":", 1)[-1]).hexdigest()

    @classmethod
    def for_tags(cls, project_id, values):
        """
        Finds matching EventUser objects from a list of tag values.

        Return a dictionary of {tag_value: event_user}.
        """
        hashes = [cls.hash_from_tag(v) for v in values]
        return {
            e.tag_value: e
            for e in cls.objects.filter(project_id=project_id, hash__in=hashes)
        }

    def save(self, *args, **kwargs):
        # NOTE(review): assert is stripped under ``python -O``; an explicit
        # raise would be more robust for input validation.
        assert (self.ident or self.username or self.email or self.ip_address), \
            "No identifying value found for user"
        if not self.hash:
            self.set_hash()
        super().save(*args, **kwargs)

    def set_hash(self):
        self.hash = self.build_hash()

    def build_hash(self):
        # Hash of the highest-priority attribute that is set; None when none are.
        for key, value in self.iter_attributes():
            if value:
                return md5_text(value).hexdigest()

    @property
    def tag_value(self):
        """
        Return the identifier used with tags to link this user.
        """
        for key, value in self.iter_attributes():
            if value:
                return "{}:{}".format(KEYWORD_MAP[key], value)

    def iter_attributes(self):
        """
        Iterate over key/value pairs for this EventUser in priority order.
        """
        for key in KEYWORD_MAP.keys():
            yield key, getattr(self, key)

    def get_label(self):
        return self.email or self.username or self.ident or self.ip_address

    def get_display_name(self):
        return self.name or self.email or self.username

    def find_similar_users(self, user):
        """EventUsers in other projects *user* can see that share this user's
        email or IP address (capped to 1000 candidate projects)."""
        from sentry.models import OrganizationMemberTeam, Project

        # limit to only teams user has opted into
        project_ids = list(
            Project.objects.filter(
                teams__in=OrganizationMemberTeam.objects.filter(
                    organizationmember__user=user,
                    organizationmember__organization__project=self.project_id,
                    is_active=True,
                ).values("team")).values_list("id", flat=True)[:1000])
        if not project_ids:
            return type(self).objects.none()

        filters = []
        if self.email:
            filters.append(models.Q(email=self.email))
        if self.ip_address:
            filters.append(models.Q(ip_address=self.ip_address))
        if not filters:
            return type(self).objects.none()

        return (type(self).objects.exclude(id=self.id).filter(
            reduce(or_, filters), project_id__in=project_ids))
class Project(Model, PendingDeletionMixin):
    """
    Projects are permission based namespaces which generally
    are the top level entry point for all data.
    """
    __core__ = True

    slug = models.SlugField(null=True)
    name = models.CharField(max_length=200)
    forced_color = models.CharField(max_length=6, null=True, blank=True)
    organization = FlexibleForeignKey("sentry.Organization")
    teams = models.ManyToManyField("sentry.Team", related_name="teams", through=ProjectTeam)
    public = models.BooleanField(default=False)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ObjectStatus.VISIBLE, _("Active")),
            (ObjectStatus.PENDING_DELETION, _("Pending Deletion")),
            (ObjectStatus.DELETION_IN_PROGRESS, _("Deletion in Progress")),
        ),
        db_index=True,
    )
    # projects that were created before this field was present
    # will have their first_event field set to date_added
    first_event = models.DateTimeField(null=True)
    flags = BitField(
        flags=(
            (u"has_releases", u"This Project has sent release data"),
            (u"has_issue_alerts_targeting", u"This Project has issue alerts targeting"),
            (u"has_transactions", u"This Project has sent transactions"),
            (u"has_alert_filters", u"This Project has filters"),
        ),
        # NOTE(review): default=10 sets the 2nd and 4th flag bits
        # (has_issue_alerts_targeting, has_alert_filters) -- confirm intended.
        default=10,
        null=True,
    )

    objects = ProjectManager(cache_fields=["pk"])
    platform = models.CharField(max_length=64, null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_project"
        unique_together = (("organization", "slug"), )

    __repr__ = sane_repr("team_id", "name", "slug")

    # PendingDeletionMixin renames these fields while deletion is pending.
    _rename_fields_on_pending_delete = frozenset(["slug"])

    def __unicode__(self):
        return u"%s (%s)" % (self.name, self.slug)

    def next_short_id(self):
        from sentry.models import Counter

        return Counter.increment(self)

    def save(self, *args, **kwargs):
        # Slug generation is serialized under a lock to avoid races between
        # concurrent first saves picking the same slug.
        if not self.slug:
            lock = locks.get("slug:project", duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(
                    self,
                    self.name,
                    organization=self.organization,
                    reserved=RESERVED_PROJECT_SLUGS,
                    max_length=50,
                )
                super(Project, self).save(*args, **kwargs)
        else:
            super(Project, self).save(*args, **kwargs)
        self.update_rev_for_option()

    def get_absolute_url(self, params=None):
        url = u"/organizations/{}/issues/".format(self.organization.slug)
        params = {} if params is None else params
        params["project"] = self.id
        if params:
            url = url + "?" + urlencode(params)
        return absolute_uri(url)

    def is_internal_project(self):
        # True when this project is one of the instance's own bookkeeping
        # projects (frontend or internal error reporting).
        for value in (settings.SENTRY_FRONTEND_PROJECT, settings.SENTRY_PROJECT):
            if six.text_type(self.id) == six.text_type(value) or six.text_type(
                    self.slug) == six.text_type(value):
                return True
        return False

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        return projectoptions.set(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        return projectoptions.get(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        return projectoptions.delete(self, *args, **kwargs)

    def update_rev_for_option(self):
        return projectoptions.update_rev_for_option(self)

    @property
    def callsign(self):
        warnings.warn(
            "Project.callsign is deprecated. Use Group.get_short_id() instead.",
            DeprecationWarning)
        return self.slug.upper()

    @property
    def color(self):
        # An explicitly forced color wins; otherwise derive one from the slug.
        if self.forced_color is not None:
            return "#%s" % self.forced_color
        return get_hashed_color(self.callsign or self.slug)

    @property
    def member_set(self):
        # Active org members with an active membership on any of this
        # project's teams.
        from sentry.models import OrganizationMember

        return self.organization.member_set.filter(
            id__in=OrganizationMember.objects.filter(
                organizationmemberteam__is_active=True,
                organizationmemberteam__team__in=self.teams.all(),
            ).values("id"),
            user__is_active=True,
        ).distinct()

    def has_access(self, user, access=None):
        """Deprecated membership check; also validates any SSO identity."""
        from sentry.models import AuthIdentity, OrganizationMember

        warnings.warn("Project.has_access is deprecated.", DeprecationWarning)

        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        try:
            member = queryset.get()
        except OrganizationMember.DoesNotExist:
            return False

        try:
            auth_identity = AuthIdentity.objects.get(
                auth_provider__organization=self.organization_id,
                user=member.user_id)
        except AuthIdentity.DoesNotExist:
            # No SSO configured for this member: membership alone suffices.
            return True

        return auth_identity.is_valid(member)

    def get_audit_log_data(self):
        return {
            "id": self.id,
            "slug": self.slug,
            "name": self.name,
            "status": self.status,
            "public": self.public,
        }

    def get_full_name(self):
        return self.slug

    def get_member_alert_settings(self, user_option):
        """
        Returns a list of users who have alert notifications explicitly
        enabled/disabled.
        :param user_option: alert option key, typically 'mail:alert'
        :return: A dictionary in format {<user_id>: <int_alert_value>}
        """
        from sentry.models import UserOption

        return {
            o.user_id: int(o.value)
            for o in UserOption.objects.filter(project=self, key=user_option)
        }

    def get_notification_recipients(self, user_option):
        """Member user ids that should receive notifications, combining
        per-project settings with each user's global default."""
        from sentry.models import UserOption

        alert_settings = self.get_member_alert_settings(user_option)
        disabled = set(u for u, v in six.iteritems(alert_settings) if v == 0)

        member_set = set(
            self.member_set.exclude(user__in=disabled).values_list("user", flat=True))

        # determine members default settings
        members_to_check = set(u for u in member_set if u not in alert_settings)
        if members_to_check:
            disabled = set((uo.user_id for uo in UserOption.objects.filter(
                key="subscribe_by_default", user__in=members_to_check)
                if uo.value == "0"))
            member_set = [x for x in member_set if x not in disabled]

        return member_set

    def get_mail_alert_subscribers(self):
        user_ids = self.get_notification_recipients("mail:alert")
        if not user_ids:
            return []
        from sentry.models import User

        return list(User.objects.filter(id__in=user_ids))

    def is_user_subscribed_to_mail_alerts(self, user):
        # Project-level setting wins; otherwise fall back to the user's
        # global 'subscribe_by_default' preference (default: subscribed).
        from sentry.models import UserOption

        is_enabled = UserOption.objects.get_value(user, "mail:alert", project=self)
        if is_enabled is None:
            is_enabled = UserOption.objects.get_value(user, "subscribe_by_default",
                                                      "1") == "1"
        else:
            is_enabled = bool(is_enabled)
        return is_enabled

    def filter_to_subscribed_users(self, users):
        """
        Filters a list of users down to the users who are subscribed to email alerts. We
        check both the project level settings and global default settings.
        """
        from sentry.models import UserOption

        project_options = UserOption.objects.filter(
            user__in=users, project=self, key="mail:alert").values_list("user_id", "value")

        user_settings = {user_id: value for user_id, value in project_options}
        users_without_project_setting = [
            user for user in users if user.id not in user_settings
        ]
        if users_without_project_setting:
            user_default_settings = {
                user_id: value
                for user_id, value in UserOption.objects.filter(
                    user__in=users_without_project_setting,
                    key="subscribe_by_default",
                    project__isnull=True,
                ).values_list("user_id", "value")
            }
            for user in users_without_project_setting:
                # Missing global setting means subscribed by default ("1").
                user_settings[user.id] = int(
                    user_default_settings.get(user.id, "1"))

        return [user for user in users if bool(user_settings[user.id])]

    def transfer_to(self, team=None, organization=None):
        """Move this project to another team/organization, severing
        org-scoped bindings (releases, environments) when the org changes."""
        # NOTE: this will only work properly if the new team is in a different
        # org than the existing one, which is currently the only use case in
        # production
        # TODO(jess): refactor this to make it an org transfer only
        from sentry.models import (
            Environment,
            EnvironmentProject,
            ProjectTeam,
            ReleaseProject,
            ReleaseProjectEnvironment,
            Rule,
        )

        if organization is None:
            organization = team.organization

        old_org_id = self.organization_id
        org_changed = old_org_id != organization.id

        self.organization = organization

        try:
            with transaction.atomic():
                self.update(organization=organization)
        except IntegrityError:
            # Slug collision in the target org: derive a fresh slug.
            slugify_instance(self, self.name, organization=organization, max_length=50)
            self.update(slug=self.slug, organization=organization)

        # Both environments and releases are bound at an organization level.
        # Due to this, when you transfer a project into another org, we have to
        # handle this behavior somehow. We really only have two options here:
        # * Copy over all releases/environments into the new org and handle de-duping
        # * Delete the bindings and let them reform with new data.
        # We're generally choosing to just delete the bindings since new data
        # flowing in will recreate links correctly. The tradeoff is that
        # historical data is lost, but this is a compromise we're willing to
        # take and a side effect of allowing this feature. There are exceptions
        # to this however, such as rules, which should maintain their
        # configuration when moved across organizations.
        if org_changed:
            for model in ReleaseProject, ReleaseProjectEnvironment, EnvironmentProject:
                model.objects.filter(project_id=self.id).delete()
            # this is getting really gross, but make sure there aren't lingering associations
            # with old orgs or teams
            ProjectTeam.objects.filter(
                project=self, team__organization_id=old_org_id).delete()

        # Re-point environment-scoped rules at equivalent environments in the
        # new organization (creating them if needed).
        rules_by_environment_id = defaultdict(set)
        for rule_id, environment_id in Rule.objects.filter(
                project_id=self.id, environment_id__isnull=False).values_list(
                    "id", "environment_id"):
            rules_by_environment_id[environment_id].add(rule_id)

        environment_names = dict(
            Environment.objects.filter(
                id__in=rules_by_environment_id).values_list("id", "name"))

        for environment_id, rule_ids in rules_by_environment_id.items():
            Rule.objects.filter(id__in=rule_ids).update(
                environment_id=Environment.get_or_create(
                    self, environment_names[environment_id]).id)

        # ensure this actually exists in case from team was null
        if team is not None:
            self.add_team(team)

    def add_team(self, team):
        # Returns False when the association already exists.
        try:
            with transaction.atomic():
                ProjectTeam.objects.create(project=self, team=team)
        except IntegrityError:
            return False
        else:
            return True

    def remove_team(self, team):
        ProjectTeam.objects.filter(project=self, team=team).delete()

    def get_security_token(self):
        # Lazily create and persist the token under a lock so concurrent
        # callers agree on a single value.
        lock = locks.get(self.get_lock_key(), duration=5)
        with TimedRetryPolicy(10)(lock.acquire):
            security_token = self.get_option("sentry:token", None)
            if security_token is None:
                security_token = uuid1().hex
                self.update_option("sentry:token", security_token)
            return security_token

    def get_lock_key(self):
        return "project_token:%s" % self.id

    def copy_settings_from(self, project_id):
        """
        Copies project level settings of the inputted project
        - General Settings
        - ProjectTeams
        - Alerts Settings and Rules
        - EnvironmentProjects
        - ProjectOwnership Rules and settings
        - Project Inbound Data Filters

        Returns True if the settings have successfully been copied over
        Returns False otherwise
        """
        from sentry.models import EnvironmentProject, ProjectOption, ProjectOwnership, Rule

        model_list = [EnvironmentProject, ProjectOwnership, ProjectTeam, Rule]

        project = Project.objects.get(id=project_id)
        try:
            with transaction.atomic():
                for model in model_list:
                    # remove all previous project settings
                    model.objects.filter(project_id=self.id).delete()

                    # add settings from other project to self
                    for setting in model.objects.filter(project_id=project_id):
                        setting.pk = None
                        setting.project_id = self.id
                        setting.save()

                options = ProjectOption.objects.get_all_values(project=project)
                for key, value in six.iteritems(options):
                    self.update_option(key, value)

        except IntegrityError as e:
            logging.exception(
                "Error occurred during copy project settings.",
                extra={
                    "error": six.text_type(e),
                    "project_to": self.id,
                    "project_from": project_id,
                },
            )
            return False
        return True

    @staticmethod
    def is_valid_platform(value):
        # 'other' and unset are always accepted; anything else must have docs.
        if not value or value == "other":
            return True
        return integration_doc_exists(value)
class ApiKey(Model):
    """An organization-scoped API key with a bitfield of granted scopes."""
    __core__ = True

    organization = FlexibleForeignKey('sentry.Organization', related_name='key_set')
    label = models.CharField(max_length=64, blank=True, default='Default')
    key = models.CharField(max_length=32, unique=True)
    scopes = BitField(flags=(
        ('project:read', 'project:read'),
        ('project:write', 'project:write'),
        ('project:delete', 'project:delete'),
        ('project:releases', 'project:releases'),
        ('team:read', 'team:read'),
        ('team:write', 'team:write'),
        ('team:delete', 'team:delete'),
        ('event:read', 'event:read'),
        ('event:write', 'event:write'),
        ('event:delete', 'event:delete'),
        ('org:read', 'org:read'),
        ('org:write', 'org:write'),
        ('org:delete', 'org:delete'),
        ('member:read', 'member:read'),
        ('member:write', 'member:write'),
        ('member:delete', 'member:delete'),
    ))
    status = BoundedPositiveIntegerField(default=0, choices=(
        (ApiKeyStatus.ACTIVE, _('Active')),
        (ApiKeyStatus.INACTIVE, _('Inactive')),
    ), db_index=True)
    date_added = models.DateTimeField(default=timezone.now)
    # Newline-separated list of origins allowed to use this key (CORS).
    allowed_origins = models.TextField(blank=True, null=True)

    objects = BaseManager(cache_fields=('key', ))

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_apikey'

    __repr__ = sane_repr('organization_id', 'key')

    def __unicode__(self):
        return six.text_type(self.key)

    @classmethod
    def generate_api_key(cls):
        """Return a random 32-character hex token."""
        return uuid4().hex

    @property
    def is_active(self):
        return self.status == ApiKeyStatus.ACTIVE

    def save(self, *args, **kwargs):
        # Lazily assign a key on first save so callers never need to.
        if not self.key:
            self.key = ApiKey.generate_api_key()
        super(ApiKey, self).save(*args, **kwargs)

    def get_allowed_origins(self):
        """Return the allowed origins as a list of non-empty strings.

        BUG FIX: the original used ``filter(bool, ...)``, which returns a
        lazy iterator on Python 3 (this file targets 2/3 compat via ``six``),
        so callers expecting a list — or iterating twice — would break.
        A list comprehension returns a real list on both versions.
        """
        if not self.allowed_origins:
            return []
        return [origin for origin in self.allowed_origins.split('\n') if origin]

    def get_audit_log_data(self):
        """Snapshot of the fields recorded in the audit log."""
        return {
            'label': self.label,
            'key': self.key,
            'scopes': int(self.scopes),
            'status': self.status,
        }

    def get_scopes(self):
        # Only the scope names whose bit is set.
        return [k for k, v in six.iteritems(self.scopes) if v]

    def has_scope(self, scope):
        return scope in self.scopes
class Environment(Model):
    """A named environment (e.g. ``production``) scoped to an organization."""
    __core__ = False

    organization_id = BoundedPositiveIntegerField()
    projects = models.ManyToManyField('sentry.Project', through=EnvironmentProject)
    project_id = BoundedPositiveIntegerField(null=True)
    name = models.CharField(max_length=64)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_environment'
        unique_together = (
            ('project_id', 'name'),
            ('organization_id', 'name'),
        )

    __repr__ = sane_repr('organization_id', 'name')

    @classmethod
    def get_cache_key(cls, organization_id, name):
        # Hash the name so arbitrary user input cannot produce an invalid key.
        return 'env:2:%s:%s' % (organization_id, md5_text(name).hexdigest())

    @classmethod
    def get_name_or_default(cls, name):
        # A missing name (None/'') maps to the default empty-string environment.
        return name or ''

    @classmethod
    def get_for_organization_id(cls, organization_id, name):
        """Fetch an existing environment by name through a one-hour cache."""
        normalized = cls.get_name_or_default(name)
        cache_key = cls.get_cache_key(organization_id, normalized)

        cached = cache.get(cache_key)
        if cached is not None:
            return cached

        instance = cls.objects.get(
            name=normalized,
            organization_id=organization_id,
        )
        cache.set(cache_key, instance, 3600)
        return instance

    @classmethod
    def get_or_create(cls, project, name):
        """Fetch or create the environment, ensuring a project association."""
        normalized = cls.get_name_or_default(name)
        cache_key = cls.get_cache_key(project.organization_id, normalized)

        instance = cache.get(cache_key)
        if instance is None:
            instance = cls.objects.get_or_create(
                name=normalized,
                organization_id=project.organization_id,
            )[0]
            cache.set(cache_key, instance, 3600)

        # Always (re-)attach the project: a cache hit may predate the link.
        instance.add_project(project)

        return instance

    def add_project(self, project):
        # Concurrent creates race on the unique constraint; the loser is a no-op.
        try:
            with transaction.atomic():
                EnvironmentProject.objects.create(project=project, environment=self)
        except IntegrityError:
            pass
class TagValue(Model):
    """
    Stores references to available filters.
    """
    __core__ = False

    project_id = BoundedBigIntegerField(db_index=True)
    _key = FlexibleForeignKey('tagstore.TagKey', db_column='key_id')
    value = models.CharField(max_length=MAX_TAG_VALUE_LENGTH)
    data = GzippedDictField(blank=True, null=True)
    times_seen = BoundedPositiveIntegerField(default=0)
    last_seen = models.DateTimeField(
        default=timezone.now, db_index=True, null=True)
    first_seen = models.DateTimeField(
        default=timezone.now, db_index=True, null=True)

    objects = TagStoreManager()

    class Meta:
        app_label = 'tagstore'
        unique_together = (('project_id', '_key', 'value'), )
        index_together = (('project_id', '_key', 'last_seen'), )

    __repr__ = sane_repr('project_id', '_key_id', 'value')

    def delete(self):
        """Delete this row with raw SQL (bypasses Django's collector).

        BUG FIX: the original routed this through ``router.db_for_read``;
        a DELETE is a write and could otherwise be sent to a read replica.
        """
        using = router.db_for_write(TagValue)
        cursor = connections[using].cursor()
        cursor.execute(
            """
            DELETE FROM tagstore_tagvalue
            WHERE project_id = %s
              AND id = %s
            """, [self.project_id, self.id]
        )

    @property
    def key(self):
        """The tag key string, resolved as cheaply as possible."""
        # 1) an explicitly assigned key (via the setter below)
        if hasattr(self, '_set_key'):
            return self._set_key

        # 2) Django's FK cache for the ``_key`` relation (attribute name is
        # '_' + 'key' + '_cache' for the field named ``_key``)
        if hasattr(self, '__key_cache'):
            return self._key.key

        # 3) fallback: one targeted query for just the key string
        from sentry.tagstore.v2.models import TagKey

        tk = TagKey.objects.filter(
            project_id=self.project_id,
            id=self._key_id,
        ).values_list('key', flat=True).get()

        # cache for future calls
        self.key = tk

        return tk

    @key.setter
    def key(self, key):
        self._set_key = key

    def get_label(self):
        from sentry import tagstore

        return tagstore.get_tag_value_label(self.key, self.value)

    @classmethod
    def get_cache_key(cls, project_id, _key_id, value):
        # Hash the value: it is user-supplied and may be long/non-ASCII.
        return 'tagvalue:1:%s:%s:%s' % (project_id, _key_id, md5_text(value).hexdigest())

    @classmethod
    def get_or_create(cls, project_id, _key_id, value, **kwargs):
        """Fetch or create a single TagValue through a one-hour cache.

        Returns ``(instance, created)`` like Django's ``get_or_create``.
        """
        cache_key = cls.get_cache_key(project_id, _key_id, value)

        rv = cache.get(cache_key)
        created = False
        if rv is None:
            rv, created = cls.objects.get_or_create(
                project_id=project_id,
                _key_id=_key_id,
                value=value,
                **kwargs
            )
            cache.set(cache_key, rv, 3600)

        return rv, created

    @classmethod
    def get_or_create_bulk(cls, project_id, tags):
        """Resolve many ``(TagKey, value)`` pairs to TagValue instances.

        ``tags`` is an iterable of ``(tag_key, value)`` tuples; returns a dict
        mapping each tuple to its TagValue.
        """
        # Attempt to create a bunch of models in one big batch with as few
        # queries and cache calls as possible.
        # In best case, this is all done in 1 cache get.
        # If we miss cache hit here, we have to fall back to old behavior.
        key_to_model = {tag: None for tag in tags}
        tags_by_key_id = {tag[0].id: tag for tag in tags}
        remaining_keys = set(tags)

        # First attempt to hit from cache, which in theory is the hot case
        cache_key_to_key = {cls.get_cache_key(project_id, tk.id, v): (tk, v) for tk, v in tags}
        cache_key_to_models = cache.get_many(cache_key_to_key.keys())
        for model in cache_key_to_models.values():
            key_to_model[tags_by_key_id[model._key_id]] = model
            remaining_keys.remove(tags_by_key_id[model._key_id])

        if not remaining_keys:
            # 100% cache hit on all items, good work team
            return key_to_model

        # Fall back to just doing it manually.
        # Further optimizations start to become not so great.
        # For some reason, when trying to do a bulk SELECT with all of the
        # key value pairs in big OR ends up using the wrong index and ultimating
        # generating a significantly less efficient query. The only alternative is to
        # splice this up a bit and do all of the SELECTs, then do a bulk INSERT for remaining
        for key in remaining_keys:
            key_to_model[key] = cls.get_or_create(project_id, key[0].id, key[1])[0]

        return key_to_model
class Organization(Model):
    """
    An organization represents a group of individuals which maintain ownership of projects.
    """
    __core__ = True

    name = models.CharField(max_length=64)
    slug = models.SlugField(unique=True)
    status = BoundedPositiveIntegerField(
        choices=OrganizationStatus.as_choices(),
        # south will generate a default value of `'<OrganizationStatus.ACTIVE: 0>'`
        # if `.value` is omitted
        default=OrganizationStatus.ACTIVE.value
    )
    date_added = models.DateTimeField(default=timezone.now)
    members = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        through='sentry.OrganizationMember',
        related_name='org_memberships'
    )
    default_role = models.CharField(
        choices=roles.get_choices(),
        max_length=32,
        default=roles.get_default().id,
    )

    flags = BitField(
        flags=(
            (
                'allow_joinleave',
                'Allow members to join and leave teams without requiring approval.'
            ),
            (
                'enhanced_privacy',
                'Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.'
            ),
            (
                'disable_shared_issues',
                'Disable sharing of limited details on issues to anonymous users.'
            ),
            (
                'early_adopter',
                'Enable early adopter status, gaining access to features prior to public release.'
            ),
            (
                'require_2fa',
                'Require and enforce two-factor authentication for all members.'
            ),
            (
                'disable_new_visibility_features',
                'Temporarily opt out of new visibility features and ui',
            ),
        ),
        default=1
    )

    objects = OrganizationManager(cache_fields=('pk', 'slug', ))

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organization'

    __repr__ = sane_repr('owner_id', 'name', 'slug')

    @classmethod
    def get_default(cls):
        """
        Return the organization used in single organization mode.
        """
        return cls.objects.filter(
            status=OrganizationStatus.ACTIVE,
        )[0]

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def save(self, *args, **kwargs):
        if not self.slug:
            # Slug generation must be serialized to avoid duplicate slugs.
            lock = locks.get('slug:organization', duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, reserved=RESERVED_ORGANIZATION_SLUGS)
            super(Organization, self).save(*args, **kwargs)
        else:
            super(Organization, self).save(*args, **kwargs)

    def delete(self):
        if self.is_default:
            # BUG FIX: original message contained a doubled word ("the the").
            raise Exception('You cannot delete the default organization.')
        return super(Organization, self).delete()

    @cached_property
    def is_default(self):
        # Only meaningful in single-organization deployments.
        if not settings.SENTRY_SINGLE_ORGANIZATION:
            return False
        return self == type(self).get_default()

    def has_access(self, user, access=None):
        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)
        return queryset.exists()

    def get_audit_log_data(self):
        """Snapshot of the fields recorded in the audit log."""
        return {
            'id': self.id,
            'slug': self.slug,
            'name': self.name,
            'status': int(self.status),
            'flags': int(self.flags),
            'default_role': self.default_role,
        }

    def get_owners(self):
        from sentry.models import User
        return User.objects.filter(
            sentry_orgmember_set__role=roles.get_top_dog().id,
            sentry_orgmember_set__organization=self,
            is_active=True,
        )

    def get_default_owner(self):
        if not hasattr(self, '_default_owner'):
            self._default_owner = self.get_owners()[0]
        return self._default_owner

    def has_single_owner(self):
        from sentry.models import OrganizationMember
        # Fetch at most two rows; all we need to know is whether there is >1.
        count = OrganizationMember.objects.filter(
            organization=self,
            role=roles.get_top_dog().id,
            user__isnull=False,
            user__is_active=True,
        )[:2].count()
        return count == 1

    def merge_to(from_org, to_org):
        """Move all of ``from_org``'s members, teams, projects, releases and
        associated models into ``to_org``, logging each migration."""
        from sentry.models import (
            ApiKey, AuditLogEntry, AuthProvider, Commit, OrganizationAvatar,
            OrganizationIntegration, OrganizationMember, OrganizationMemberTeam,
            Project, Release, ReleaseCommit, ReleaseEnvironment, ReleaseFile,
            ReleaseHeadCommit, Repository, Team, Environment,
        )

        # BUG FIX: ``logger`` was previously bound as the first statement of
        # the member loop below but is used after the loop (team/project/
        # release migration and do_update); if ``from_org`` had no members,
        # every later log call raised NameError. Bind it once up front.
        logger = logging.getLogger('sentry.merge')

        for from_member in OrganizationMember.objects.filter(
            organization=from_org, user__isnull=False
        ):
            try:
                to_member = OrganizationMember.objects.get(
                    organization=to_org,
                    user=from_member.user,
                )
            except OrganizationMember.DoesNotExist:
                # User is not yet in the target org; just repoint the row.
                from_member.update(organization=to_org)
                to_member = from_member
            else:
                # User exists in both orgs: carry over team memberships.
                qs = OrganizationMemberTeam.objects.filter(
                    organizationmember=from_member,
                    is_active=True,
                ).select_related()
                for omt in qs:
                    OrganizationMemberTeam.objects.create_or_update(
                        organizationmember=to_member,
                        team=omt.team,
                        defaults={
                            'is_active': True,
                        },
                    )
            logger.info('user.migrate', extra={
                'instance_id': from_member.id,
                'new_member_id': to_member.id,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        for from_team in Team.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    from_team.update(organization=to_org)
            except IntegrityError:
                # Slug collision in the target org: re-slugify then move.
                slugify_instance(from_team, from_team.name, organization=to_org)
                from_team.update(
                    organization=to_org,
                    slug=from_team.slug,
                )
            logger.info('team.migrate', extra={
                'instance_id': from_team.id,
                'new_slug': from_team.slug,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        for from_project in Project.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    from_project.update(organization=to_org)
            except IntegrityError:
                slugify_instance(
                    from_project,
                    from_project.name,
                    organization=to_org,
                    reserved=RESERVED_PROJECT_SLUGS)
                from_project.update(
                    organization=to_org,
                    slug=from_project.slug,
                )
            logger.info('project.migrate', extra={
                'instance_id': from_project.id,
                'new_slug': from_project.slug,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        # TODO(jess): update this when adding unique constraint
        # on version, organization for releases
        for from_release in Release.objects.filter(organization=from_org):
            try:
                to_release = Release.objects.get(version=from_release.version, organization=to_org)
            except Release.DoesNotExist:
                Release.objects.filter(id=from_release.id).update(organization=to_org)
            else:
                Release.merge(to_release, [from_release])
            logger.info('release.migrate', extra={
                'instance_id': from_release.id,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        def do_update(queryset, params):
            # Bulk update, falling back to per-instance updates (skipping
            # conflicting rows) when a unique constraint is violated.
            model_name = queryset.model.__name__.lower()
            try:
                with transaction.atomic():
                    queryset.update(**params)
            except IntegrityError:
                for instance in queryset:
                    try:
                        with transaction.atomic():
                            instance.update(**params)
                    except IntegrityError:
                        logger.info('{}.migrate-skipped'.format(model_name), extra={
                            'from_organization_id': from_org.id,
                            'to_organization_id': to_org.id,
                        })
                    else:
                        logger.info('{}.migrate'.format(model_name), extra={
                            'instance_id': instance.id,
                            'from_organization_id': from_org.id,
                            'to_organization_id': to_org.id,
                        })
            else:
                logger.info('{}.migrate'.format(model_name), extra={
                    'from_organization_id': from_org.id,
                    'to_organization_id': to_org.id,
                })

        # Models filtered by the ``organization`` instance FK...
        INST_MODEL_LIST = (
            AuthProvider, ApiKey, AuditLogEntry, OrganizationAvatar,
            OrganizationIntegration, ReleaseEnvironment, ReleaseFile,
        )
        # ...and models filtered by a raw ``organization_id`` column.
        ATTR_MODEL_LIST = (
            Commit, ReleaseCommit, ReleaseHeadCommit, Repository, Environment,
        )

        for model in INST_MODEL_LIST:
            queryset = model.objects.filter(
                organization=from_org,
            )
            do_update(queryset, {'organization': to_org})

        for model in ATTR_MODEL_LIST:
            queryset = model.objects.filter(
                organization_id=from_org.id,
            )
            do_update(queryset, {'organization_id': to_org.id})

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.unset_value(self, *args, **kwargs)

    def send_delete_confirmation(self, audit_log_entry, countdown):
        """Email all owners that the org is queued for deletion in ``countdown`` seconds."""
        from sentry import options
        from sentry.utils.email import MessageBuilder

        owners = self.get_owners()

        context = {
            'organization': self,
            'audit_log_entry': audit_log_entry,
            'eta': timezone.now() + timedelta(seconds=countdown),
            'url': absolute_uri(reverse(
                'sentry-restore-organization',
                args=[self.slug],
            )),
        }

        MessageBuilder(
            subject='%sOrganization Queued for Deletion' % (options.get('mail.subject-prefix'), ),
            template='sentry/emails/org_delete_confirm.txt',
            html_template='sentry/emails/org_delete_confirm.html',
            type='org.confirm_delete',
            context=context,
        ).send_async([o.email for o in owners])

    def flag_has_changed(self, flag_name):
        "Returns ``True`` if ``flag`` has changed since initialization."
        return getattr(self.old_value('flags'), flag_name, None) != getattr(self.flags, flag_name)

    def handle_2fa_required(self, request):
        """Kick off async removal of members who have not enabled 2FA."""
        from sentry.models import ApiKey
        from sentry.tasks.auth import remove_2fa_non_compliant_members

        actor_id = request.user.id if request.user and request.user.is_authenticated() else None
        api_key_id = request.auth.id if hasattr(
            request, 'auth') and isinstance(
            request.auth, ApiKey) else None
        ip_address = request.META['REMOTE_ADDR']

        remove_2fa_non_compliant_members.delay(
            self.id,
            actor_id=actor_id,
            actor_key_id=api_key_id,
            ip_address=ip_address
        )

    def get_url_viewname(self):
        return 'sentry-organization-issue-list'

    def get_url(self):
        return reverse(self.get_url_viewname(), args=[self.slug])
class Activity(Model):
    """A single event in a group's activity stream (status change, note, deploy, ...)."""

    # Activity ``type`` constants -- one per distinct user/system action.
    SET_RESOLVED = 1
    SET_UNRESOLVED = 2
    SET_MUTED = 3
    SET_PUBLIC = 4
    SET_PRIVATE = 5
    SET_REGRESSION = 6
    CREATE_ISSUE = 7
    NOTE = 8
    FIRST_SEEN = 9
    DEPLOY = 10

    TYPE = (
        # (TYPE, verb-slug)
        (SET_RESOLVED, 'set_resolved'),
        (SET_UNRESOLVED, 'set_unresolved'),
        (SET_MUTED, 'set_muted'),
        (SET_PUBLIC, 'set_public'),
        (SET_PRIVATE, 'set_private'),
        (SET_REGRESSION, 'set_regression'),
        (CREATE_ISSUE, 'create_issue'),
        (NOTE, 'note'),
        (FIRST_SEEN, 'first_seen'),
        (DEPLOY, 'deploy'),
    )

    project = models.ForeignKey('sentry.Project')
    group = models.ForeignKey('sentry.Group', null=True)
    event = models.ForeignKey('sentry.Event', null=True)
    # index on (type, ident)
    type = BoundedPositiveIntegerField(choices=TYPE)
    ident = models.CharField(max_length=64, null=True)
    # if the user is not set, it's assumed to be the system
    user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True)
    datetime = models.DateTimeField(default=timezone.now)
    data = GzippedDictField(null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_activity'

    __repr__ = sane_repr('project_id', 'group_id', 'event_id', 'user_id', 'type', 'ident')

    def save(self, *args, **kwargs):
        # ``id`` is empty only before the first INSERT, so this detects creation.
        created = bool(not self.id)

        super(Activity, self).save(*args, **kwargs)

        # Counter bumps below must only happen once, on initial creation.
        if not created:
            return

        # HACK: support Group.num_comments
        if self.type == Activity.NOTE:
            self.group.update(num_comments=F('num_comments') + 1)

            if self.event:
                self.event.update(num_comments=F('num_comments') + 1)

    def send_notification(self):
        """Email access-group and team members about a new NOTE on this group.

        No-op unless this activity is a NOTE attached to a group. Recipients
        exclude the author and anyone who disabled 'subscribe_notes'.
        """
        from sentry.models import User, UserOption, ProjectOption
        from sentry.utils.email import MessageBuilder, group_id_to_email

        if self.type != Activity.NOTE or not self.group:
            return

        # TODO(dcramer): some of this logic is duplicated in NotificationPlugin
        # fetch access group members
        user_id_list = set(
            User.objects.filter(accessgroup__projects=self.project, is_active=True).exclude(
                id=self.user_id,
            ).values_list('id', flat=True))

        if self.project.team:
            # fetch team members
            user_id_list |= set(
                u_id for u_id in self.project.team.member_set.filter(
                    user__is_active=True,
                ).exclude(user__id=self.user_id,
                          ).values_list('user', flat=True))

        if not user_id_list:
            return

        # Users who opted out of note notifications ('subscribe_notes' == '0').
        disabled = set(
            UserOption.objects.filter(
                user__in=user_id_list,
                key='subscribe_notes',
                value=u'0',
            ).values_list('user', flat=True))

        # NOTE(review): on Python 3 ``filter`` returns a lazy (always-truthy)
        # iterator, which would defeat the emptiness check below -- this code
        # appears to assume Python 2 list semantics; confirm before porting.
        send_to = filter(lambda u_id: u_id not in disabled, user_id_list)

        if not send_to:
            return

        author = self.user.first_name or self.user.username

        subject_prefix = ProjectOption.objects.get_value(
            self.project, 'subject_prefix', settings.EMAIL_SUBJECT_PREFIX)
        if subject_prefix:
            subject_prefix = subject_prefix.rstrip() + ' '

        subject = '%s%s' % (subject_prefix, self.group.get_email_subject())

        context = {
            'text': self.data['text'],
            'author': author,
            'group': self.group,
            'link': self.group.get_absolute_url(),
        }

        # Reply-to header routes email replies back to this group's inbox.
        headers = {
            'X-Sentry-Reply-To': group_id_to_email(self.group.pk),
        }

        msg = MessageBuilder(
            subject=subject,
            context=context,
            template='sentry/emails/new_note.txt',
            html_template='sentry/emails/new_note.html',
            headers=headers,
            reference=self,
            reply_reference=self.group,
        )
        msg.add_users(send_to, project=self.project)

        # Sending is best-effort: failures are logged, never raised to callers.
        try:
            msg.send()
        except Exception as e:
            logger = logging.getLogger('sentry.mail.errors')
            logger.exception(e)
class GroupRelease(Model):
    """Tracks when a group was first/last seen in a release+environment pair."""

    __core__ = False

    # TODO: Should be BoundedBigIntegerField
    project_id = BoundedPositiveIntegerField(db_index=True)
    group_id = BoundedBigIntegerField()
    # TODO: Should be BoundedBigIntegerField
    release_id = BoundedPositiveIntegerField(db_index=True)
    environment = models.CharField(max_length=64, default="")
    first_seen = models.DateTimeField(default=timezone.now)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_grouprelease"
        unique_together = (("group_id", "release_id", "environment"),)

    __repr__ = sane_repr("group_id", "release_id")

    @classmethod
    def get_cache_key(cls, group_id, release_id, environment):
        digest = md5_text(f"{release_id}:{environment}").hexdigest()
        return "grouprelease:1:{}:{}".format(group_id, digest)

    @classmethod
    def get_or_create(cls, group, release, environment, datetime, **kwargs):
        """Fetch (or lazily create) the GroupRelease row via a one-hour cache,
        bumping ``last_seen`` at most once per minute."""
        cache_key = cls.get_cache_key(group.id, release.id, environment.name)

        instance = cache.get(cache_key)
        freshly_created = False

        if instance is None:
            try:
                with transaction.atomic():
                    instance = cls.objects.create(
                        release_id=release.id,
                        group_id=group.id,
                        environment=environment.name,
                        project_id=group.project_id,
                        first_seen=datetime,
                        last_seen=datetime,
                    )
                freshly_created = True
            except IntegrityError:
                # Lost a create race -- the row exists now; fetch it instead.
                instance = cls.objects.get(
                    release_id=release.id, group_id=group.id, environment=environment.name
                )
            cache.set(cache_key, instance, 3600)

        # TODO(dcramer): this would be good to buffer, but until then we minimize
        # updates to once a minute, and allow Postgres to optimistically skip
        # it even if we can't
        stale_cutoff = datetime - timedelta(seconds=60)
        if not freshly_created and instance.last_seen < stale_cutoff:
            cls.objects.filter(
                id=instance.id, last_seen__lt=stale_cutoff
            ).update(last_seen=datetime)
            instance.last_seen = datetime
            cache.set(cache_key, instance, 3600)

        return instance
class Integration(Model):
    """A provider-level integration instance shared across organizations."""

    __core__ = False

    organizations = models.ManyToManyField("sentry.Organization",
                                           related_name="integrations",
                                           through=OrganizationIntegration)
    projects = models.ManyToManyField("sentry.Project",
                                      related_name="integrations",
                                      through=ProjectIntegration)
    provider = models.CharField(max_length=64)
    external_id = models.CharField(max_length=64)
    name = models.CharField(max_length=200)
    # metadata might be used to store things like credentials, but it should NOT
    # be used to store organization-specific information, as the Integration
    # instance is shared among multiple organizations
    metadata = EncryptedJsonField(default=dict)
    status = BoundedPositiveIntegerField(default=ObjectStatus.VISIBLE,
                                         choices=ObjectStatus.as_choices(),
                                         null=True)
    date_added = models.DateTimeField(default=timezone.now, null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_integration"
        unique_together = (("provider", "external_id"), )

    def get_provider(self):
        from sentry import integrations

        return integrations.get(self.provider)

    def get_installation(self, organization_id, **kwargs):
        provider = self.get_provider()
        return provider.get_installation(self, organization_id, **kwargs)

    def has_feature(self, feature):
        return feature in self.get_provider().features

    def add_organization(self, organization, user=None, default_auth_id=None):
        """
        Add an organization to this integration.

        Returns False if the OrganizationIntegration was not created
        """
        try:
            org_integration, created = OrganizationIntegration.objects.get_or_create(
                organization_id=organization.id,
                integration_id=self.id,
                defaults={
                    "default_auth_id": default_auth_id,
                    "config": {}
                },
            )
            # Refresh the default auth on an already-existing link.
            if not created and default_auth_id:
                org_integration.update(default_auth_id=default_auth_id)
        except IntegrityError:
            return False

        # Success path only: announce the new (or refreshed) association.
        integration_added.send_robust(integration=self,
                                      organization=organization,
                                      user=user,
                                      sender=self.__class__)
        return org_integration
class FileBlob(Model):
    """Deduplicated file content, keyed by SHA-1 checksum, stored on a
    pluggable storage backend."""

    __core__ = False

    storage = models.CharField(max_length=128)
    storage_options = JSONField()
    path = models.TextField(null=True)
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, unique=True)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_fileblob'

    @classmethod
    def from_file(cls, fileobj):
        """
        Retrieve a FileBlob instance for the given file.

        If not already present, this will cause it to be stored.

        >>> blob = FileBlob.from_file(fileobj)
        """
        size = 0

        # NOTE(review): ``sha1('')`` only works on Python 2 (str); Python 3
        # would require ``sha1(b'')`` -- confirm the target runtime.
        checksum = sha1('')
        for chunk in fileobj:
            size += len(chunk)
            checksum.update(chunk)
        checksum = checksum.hexdigest()

        lock_key = 'fileblob:upload:{}'.format(checksum)
        # TODO(dcramer): the database here is safe, but if this lock expires
        # and duplicate files are uploaded then we need to prune one
        with Lock(lock_key, timeout=600):
            # test for presence
            try:
                existing = FileBlob.objects.get(checksum=checksum)
            except FileBlob.DoesNotExist:
                pass
            else:
                return existing

            blob = cls(
                size=size,
                checksum=checksum,
                storage=settings.SENTRY_FILESTORE,
                storage_options=settings.SENTRY_FILESTORE_OPTIONS,
            )
            blob.path = cls.generate_unique_path(blob.timestamp)

            # NOTE(review): the checksum loop above consumed ``fileobj`` --
            # this assumes the storage backend rewinds/re-reads the file
            # (e.g. via Django File semantics) before saving; confirm.
            storage = blob.get_storage()
            storage.save(blob.path, fileobj)
            blob.save()

        metrics.timing('filestore.blob-size', blob.size)
        return blob

    @classmethod
    def generate_unique_path(cls, timestamp):
        """Build a '<day-bucket>/<seconds-into-day>/<uuid>' style path."""
        # NOTE(review): ``map`` must return a list for ``.append`` to work,
        # i.e. Python 2 semantics -- confirm before porting to Python 3.
        pieces = map(str, divmod(int(timestamp.strftime('%s')), ONE_DAY))
        pieces.append('%s' % (uuid4().hex, ))
        return '/'.join(pieces)

    def delete(self, *args, **kwargs):
        # Remove the backing file first; the row keeps ``path`` until commit.
        if self.path:
            self.deletefile(commit=False)
        super(FileBlob, self).delete(*args, **kwargs)

    def get_storage(self):
        """Instantiate the configured storage backend for this blob."""
        backend = self.storage
        options = self.storage_options
        storage = get_storage_class(backend)
        return storage(**options)

    def deletefile(self, commit=False):
        """Delete the stored content and clear ``path``; optionally persist."""
        assert self.path

        storage = self.get_storage()
        storage.delete(self.path)

        self.path = None

        if commit:
            self.save()

    def getfile(self):
        """
        Return a file-like object for this File's content.

        >>> with blob.getfile() as src, open('/tmp/localfile', 'wb') as dst:
        >>>     for chunk in src.chunks():
        >>>         dst.write(chunk)
        """
        assert self.path

        storage = self.get_storage()
        return storage.open(self.path)
class Integration(Model):
    """A provider-level integration instance shared across organizations."""

    __core__ = False

    organizations = models.ManyToManyField('sentry.Organization',
                                           related_name='integrations',
                                           through=OrganizationIntegration)
    projects = models.ManyToManyField('sentry.Project',
                                      related_name='integrations',
                                      through=ProjectIntegration)
    provider = models.CharField(max_length=64)
    external_id = models.CharField(max_length=64)
    name = models.CharField(max_length=200)
    # metadata might be used to store things like credentials, but it should NOT
    # be used to store organization-specific information, as the Integration
    # instance is shared among multiple organizations
    metadata = EncryptedJsonField(default=dict)
    status = BoundedPositiveIntegerField(
        default=ObjectStatus.VISIBLE,
        choices=ObjectStatus.as_choices(),
        null=True,
    )
    date_added = models.DateTimeField(default=timezone.now, null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_integration'
        unique_together = (('provider', 'external_id'), )

    def get_provider(self):
        from sentry import integrations

        return integrations.get(self.provider)

    def get_installation(self, organization_id, **kwargs):
        return self.get_provider().get_installation(self, organization_id, **kwargs)

    def has_feature(self, feature):
        return feature in self.get_provider().features

    def add_organization(self, organization_id, default_auth_id=None, config=None):
        """
        Add an organization to this integration.

        Returns the created OrganizationIntegration, or False if it could
        not be created (already exists).
        """
        try:
            with transaction.atomic():
                org_integration = OrganizationIntegration.objects.create(
                    organization_id=organization_id,
                    integration_id=self.id,
                    default_auth_id=default_auth_id,
                    config=config or {},
                )
        except IntegrityError:
            return False

        # BUG FIX: this analytics call previously lived in the ``else`` clause
        # of a try/except whose ``try`` body returned immediately -- a return
        # in ``try`` skips ``else``, so the event was never recorded.
        analytics.record(
            'integration.added',
            provider=self.provider,
            id=self.id,
            organization_id=organization_id,
        )
        return org_integration
class ExportedData(Model):
    """
    Stores references to asynchronous data export jobs
    """

    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.SET_NULL)
    file = FlexibleForeignKey("sentry.File", null=True, db_constraint=False,
                              on_delete=models.SET_NULL)
    date_added = models.DateTimeField(default=timezone.now)
    date_finished = models.DateTimeField(null=True)
    date_expired = models.DateTimeField(null=True, db_index=True)
    query_type = BoundedPositiveIntegerField(
        choices=ExportQueryType.as_choices())
    query_info = JSONField()

    @property
    def status(self):
        """Lifecycle state: Early (still running), Expired, or Valid.

        BUG FIX: the original compared ``self.date_expired < timezone.now()``
        unguarded; ``date_expired`` is nullable and comparing None with a
        datetime raises TypeError on Python 3. A finished export with no
        expiry is treated as Valid.
        """
        if self.date_finished is None:
            return ExportStatus.Early
        elif self.date_expired is not None and self.date_expired < timezone.now():
            return ExportStatus.Expired
        else:
            return ExportStatus.Valid

    @property
    def payload(self):
        # Copy so callers cannot mutate the stored query_info.
        payload = self.query_info.copy()
        payload["export_type"] = ExportQueryType.as_str(self.query_type)
        return payload

    @property
    def file_name(self):
        date = self.date_added.strftime("%Y-%B-%d")
        export_type = ExportQueryType.as_str(self.query_type)
        # Example: Discover_2020-July-21_27.csv
        return "{}_{}_{}.csv".format(export_type, date, self.id)

    @staticmethod
    def format_date(date):
        # Example: 12:21 PM on July 21, 2020 (UTC)
        return None if date is None else date.strftime(
            "%-I:%M %p on %B %d, %Y (%Z)")

    def delete_file(self):
        if self.file:
            self.file.delete()

    def delete(self, *args, **kwargs):
        # Remove the backing file before deleting the row itself.
        self.delete_file()
        super(ExportedData, self).delete(*args, **kwargs)

    def finalize_upload(self, file, expiration=DEFAULT_EXPIRATION):
        """Attach the finished export file, stamp the dates, and notify."""
        self.delete_file()  # If a file is present, remove it
        current_time = timezone.now()
        expire_time = current_time + expiration
        self.update(file=file, date_finished=current_time, date_expired=expire_time)
        self.email_success()

    def email_success(self):
        from sentry.utils.email import MessageBuilder

        # The following condition should never be true, but it's a safeguard
        # in case someone manually calls this method
        if self.date_finished is None or self.date_expired is None or self.file is None:
            logger.warning(
                "Notification email attempted on incomplete dataset",
                extra={
                    "data_export_id": self.id,
                    "organization_id": self.organization_id
                },
            )
            return
        url = absolute_uri(
            reverse("sentry-data-export-details", args=[self.organization.slug, self.id]))
        msg = MessageBuilder(
            subject="Your data is ready.",
            context={
                "url": url,
                "expiration": self.format_date(self.date_expired)
            },
            type="organization.export-data",
            template="sentry/emails/data-export-success.txt",
            html_template="sentry/emails/data-export-success.html",
        )
        msg.send_async([self.user.email])
        metrics.incr("dataexport.end", tags={"success": True}, sample_rate=1.0)

    def email_failure(self, message):
        from sentry.utils.email import MessageBuilder

        msg = MessageBuilder(
            subject="We couldn't export your data.",
            context={
                "creation": self.format_date(self.date_added),
                "error_message": message,
                "payload": json.dumps(self.payload, indent=2, sort_keys=True),
            },
            type="organization.export-data",
            template="sentry/emails/data-export-failure.txt",
            html_template="sentry/emails/data-export-failure.html",
        )
        msg.send_async([self.user.email])
        metrics.incr("dataexport.end", tags={"success": False}, sample_rate=1.0)
        # Failed exports are not kept around.
        self.delete()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_exporteddata"

    __repr__ = sane_repr("query_type", "query_info")
class GroupSnooze(Model):
    """
    A snooze marks an issue as ignored until a condition is hit.

    - If ``until`` is set, the snooze is lifted at the given datetime.
    - If ``count`` is set, the snooze is lifted when total occurrences match.
    - If ``window`` is set (in addition to count), the snooze is lifted when
      the rate of events matches.
    - If ``user_count`` is set, the snooze is lifted when unique users match.
    - If ``user_window`` is set (in addition to count), the snooze is lifted
      when the rate unique users matches.

    NOTE: `window` and `user_window` are specified in minutes
    """
    __core__ = False

    group = models.OneToOneField('sentry.Group')
    until = models.DateTimeField(null=True)
    count = BoundedPositiveIntegerField(null=True)
    window = BoundedPositiveIntegerField(null=True)
    user_count = BoundedPositiveIntegerField(null=True)
    user_window = BoundedPositiveIntegerField(null=True)
    # baseline counters captured when the snooze was created; presumably holds
    # 'times_seen' / 'users_seen' keys (see is_valid) -- confirm against writer
    state = JSONField(null=True)
    actor_id = BoundedPositiveIntegerField(null=True)

    objects = BaseManager(cache_fields=('group', ))

    class Meta:
        db_table = 'sentry_groupsnooze'
        app_label = 'sentry'

    __repr__ = sane_repr('group_id')

    def is_valid(self, group=None, test_rates=False):
        """Return True while the snooze should still be honored.

        ``group`` may be passed to avoid refetching; it must be the snoozed
        group. ``test_rates`` additionally evaluates the (more expensive)
        tsdb-backed rate conditions.

        Raises ValueError if ``group`` is a different group than the one this
        snooze is bound to.
        """
        if group is None:
            group = self.group
        elif group.id != self.group_id:
            raise ValueError

        # time-based snooze: lifted once the deadline passes
        if self.until:
            if self.until <= timezone.now():
                return False

        # occurrence-count snooze: either rate-based (count within window)
        # or absolute delta against the baseline captured in `state`
        if self.count:
            if self.window:
                if test_rates:
                    if not self.test_frequency_rates():
                        return False
            elif self.count <= group.times_seen - self.state['times_seen']:
                return False

        # unique-user snooze: only evaluated when rate testing is requested
        if self.user_count and test_rates:
            if self.user_window:
                if not self.test_user_rates():
                    return False
            elif self.user_count <= group.count_users_seen(
            ) - self.state['users_seen']:
                return False
        return True

    def test_frequency_rates(self):
        """Return False once event volume in the window reaches `count`."""
        from sentry import tsdb

        end = timezone.now()
        start = end - timedelta(minutes=self.window)

        rate = tsdb.get_sums(
            model=tsdb.models.group,
            keys=[self.group_id],
            start=start,
            end=end,
        )[self.group_id]
        if rate >= self.count:
            return False

        return True

    def test_user_rates(self):
        """Return False once unique users in the window reach `user_count`."""
        from sentry import tsdb

        end = timezone.now()
        start = end - timedelta(minutes=self.user_window)

        rate = tsdb.get_distinct_counts_totals(
            model=tsdb.models.users_affected_by_group,
            keys=[self.group_id],
            start=start,
            end=end,
        )[self.group_id]
        if rate >= self.user_count:
            return False

        return True
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    A commit is generally a git commit. See also releasecommit.py
    """

    __include_in_export__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    projects = models.ManyToManyField(
        "sentry.Project", related_name="releases", through=ReleaseProject
    )
    status = BoundedPositiveIntegerField(
        default=ReleaseStatus.OPEN,
        null=True,
        choices=(
            (ReleaseStatus.OPEN, _("Open")),
            (ReleaseStatus.ARCHIVED, _("Archived")),
        ),
    )

    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    data = JSONField(default={})
    # new issues (groups) that arise as a consequence of this release
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey("sentry.User", null=True, blank=True, on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True, default=0)
    last_commit_id = BoundedBigIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True, default=0)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    # Denormalized semver columns. These will be filled if `version` matches at least
    # part of our more permissive model of semver:
    # `<package>@<major>.<minor>.<patch>.<revision>-<prerelease>+<build_code>
    package = models.TextField(null=True)
    major = models.BigIntegerField(null=True)
    minor = models.BigIntegerField(null=True)
    patch = models.BigIntegerField(null=True)
    revision = models.BigIntegerField(null=True)
    prerelease = models.TextField(null=True)
    build_code = models.TextField(null=True)
    # If `build_code` can be parsed as a 64 bit int we'll store it here as well for
    # sorting/comparison purposes
    build_number = models.BigIntegerField(null=True)

    # HACK HACK HACK
    # As a transitionary step we permit release rows to exist multiple times
    # where they are "specialized" for a specific project. The goal is to
    # later split up releases by project again. This is for instance used
    # by the org release listing.
    _for_project_id = None

    # Custom Model Manager required to override create method
    objects = ReleaseModelManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_release"
        unique_together = (("organization", "version"),)
        # TODO(django2.2): Note that we create this index with each column ordered
        # descending. Django 2.2 allows us to specify functional indexes, which should
        # allow us to specify this on the model.
        # We also use a functional index to order `prerelease` according to semver rules,
        # which we can't express here for now.
        index_together = (
            ("organization", "package", "major", "minor", "patch", "revision", "prerelease"),
            ("organization", "major", "minor", "patch", "revision", "prerelease"),
            ("organization", "build_code"),
            ("organization", "build_number"),
            ("organization", "date_added"),
            ("organization", "status"),
        )

    __repr__ = sane_repr("organization_id", "version")

    SEMVER_COLS = ["major", "minor", "patch", "revision", "prerelease_case", "prerelease"]

    def __eq__(self, other):
        """Make sure that specialized releases are only comparable to the same
        other specialized release. This for instance lets us treat them
        separately for serialization purposes.
        """
        return Model.__eq__(self, other) and self._for_project_id == other._for_project_id

    def __hash__(self):
        # https://code.djangoproject.com/ticket/30333
        return super().__hash__()

    @staticmethod
    def is_valid_version(value):
        """Return True when `value` is usable as a release version string
        (non-empty, no forbidden characters, not '.', '..', or 'latest')."""
        return not (
            not value
            or any(c in value for c in BAD_RELEASE_CHARS)
            or value in (".", "..")
            or value.lower() == "latest"
        )

    @property
    def is_semver_release(self):
        # a non-NULL `package` column means the version parsed as semver
        return self.package is not None

    @staticmethod
    def is_semver_version(version):
        """
        Method that checks if a version follows semantic versioning
        """
        if not Release.is_valid_version(version):
            return False

        # Release name has to contain package_name to be parsed correctly by parse_release
        version = version if "@" in version else f"{SEMVER_FAKE_PACKAGE}@{version}"
        try:
            version_info = parse_release(version)
            version_parsed = version_info.get("version_parsed")
            return version_parsed is not None and all(
                validate_bigint(version_parsed[field])
                for field in ("major", "minor", "patch", "revision")
            )
        except RelayError:
            # This can happen on invalid legacy releases
            return False

    @classmethod
    def get_cache_key(cls, organization_id, version):
        """Cache key for a (organization, version) release lookup."""
        return f"release:3:{organization_id}:{md5_text(version).hexdigest()}"

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        """Distributed-lock key guarding commit binding for one release."""
        return f"releasecommits:{organization_id}:{release_id}"

    @classmethod
    def get(cls, project, version):
        """Cached lookup of a release by project + version.

        Caches a -1 sentinel for misses (so negative lookups are also cached)
        and returns None in that case.
        """
        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id, projects=project, version=version
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)

        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """Fetch or create the release for project + version, with timing metrics."""
        with metrics.timer("models.release.get_or_create") as metric_tags:
            return cls._get_or_create_impl(project, version, date_added, metric_tags)

    @classmethod
    def _get_or_create_impl(cls, project, version, date_added, metric_tags):
        """Implementation of get_or_create; also tags the surrounding metric
        with created/cache_hit outcomes and flips the project's has_releases flag."""
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)

        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = (f"{project.slug}-{version}")[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(
                    organization_id=project.organization_id,
                    version__in=[version, project_version],
                    projects=project,
                )
            )

            if releases:
                # prefer the project-prefixed legacy version row when present
                try:
                    release = [r for r in releases if r.version == project_version][0]
                except IndexError:
                    release = releases[0]
                metric_tags["created"] = "false"
            else:
                try:
                    with atomic_transaction(using=router.db_for_write(cls)):
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )

                    metric_tags["created"] = "true"
                except IntegrityError:
                    # lost a create race; fetch the row the winner inserted
                    metric_tags["created"] = "false"
                    release = cls.objects.get(
                        organization_id=project.organization_id, version=version
                    )

                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F("flags").bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)
            metric_tags["cache_hit"] = "false"
        else:
            metric_tags["cache_hit"] = "true"

        return release

    @cached_property
    def version_info(self):
        """Parsed form of `version` (via Relay), or None for unparseable releases."""
        try:
            return parse_release(self.version)
        except RelayError:
            # This can happen on invalid legacy releases
            return None

    @classmethod
    def merge(cls, to_release, from_releases):
        """Repoint all rows referencing each of `from_releases` at `to_release`,
        then delete the merged-away releases. Duplicate-key conflicts fall back
        to row-by-row updates, deleting rows that cannot be moved."""
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (
            Group,
            GroupRelease,
            GroupResolution,
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
        )

        model_list = (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            GroupRelease,
            GroupResolution,
        )
        for release in from_releases:
            for model in model_list:
                if hasattr(model, "release"):
                    update_kwargs = {"release": to_release}
                else:
                    update_kwargs = {"release_id": to_release.id}
                try:
                    with atomic_transaction(using=router.db_for_write(model)):
                        model.objects.filter(release_id=release.id).update(**update_kwargs)
                except IntegrityError:
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with atomic_transaction(using=router.db_for_write(model)):
                                model.objects.filter(id=item.id).update(**update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(first_release=to_release)

            release.delete()

    def add_dist(self, name, date_added=None):
        """Get or create the named Distribution bound to this release."""
        from sentry.models import Distribution

        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={"date_added": date_added, "organization_id": self.organization_id},
        )[0]

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project

        try:
            with atomic_transaction(using=router.db_for_write(ReleaseProject)):
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F("flags").bitor(Project.flags.has_releases))
        except IntegrityError:
            return False
        else:
            return True

    def handle_commit_ranges(self, refs):
        """
        Takes commit refs of the form:
        [
            {
                'previousCommit': None,
                'commit': 'previous_commit..commit',
            }
        ]
        Note: Overwrites 'previousCommit' and 'commit'
        """
        for ref in refs:
            if COMMIT_RANGE_DELIMITER in ref["commit"]:
                ref["previousCommit"], ref["commit"] = ref["commit"].split(COMMIT_RANGE_DELIMITER)

    def set_refs(self, refs, user, fetch=False):
        """Record head commits for the given repository refs; optionally kick
        off async commit fetching against the previous release.

        Raises InvalidRepository if a ref names an unknown repository.
        """
        with sentry_sdk.start_span(op="set_refs"):
            from sentry.api.exceptions import InvalidRepository
            from sentry.models import Commit, ReleaseHeadCommit, Repository
            from sentry.tasks.commits import fetch_commits

            # TODO: this does the wrong thing unless you are on the most
            # recent release.  Add a timestamp compare?
            prev_release = (
                type(self)
                .objects.filter(
                    organization_id=self.organization_id, projects__in=self.projects.all()
                )
                .extra(select={"sort": "COALESCE(date_released, date_added)"})
                .exclude(version=self.version)
                .order_by("-sort")
                .first()
            )

            names = {r["repository"] for r in refs}
            repos = list(
                Repository.objects.filter(organization_id=self.organization_id, name__in=names)
            )
            repos_by_name = {r.name: r for r in repos}
            invalid_repos = names - set(repos_by_name.keys())
            if invalid_repos:
                raise InvalidRepository("Invalid repository names: %s" % ",".join(invalid_repos))

            self.handle_commit_ranges(refs)

            for ref in refs:
                repo = repos_by_name[ref["repository"]]

                commit = Commit.objects.get_or_create(
                    organization_id=self.organization_id, repository_id=repo.id, key=ref["commit"]
                )[0]
                # update head commit for repo/release if exists
                ReleaseHeadCommit.objects.create_or_update(
                    organization_id=self.organization_id,
                    repository_id=repo.id,
                    release=self,
                    values={"commit": commit},
                )
            if fetch:
                fetch_commits.apply_async(
                    kwargs={
                        "release_id": self.id,
                        "user_id": user.id,
                        "refs": refs,
                        "prev_release_id": prev_release and prev_release.id,
                    }
                )

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """
        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get("timestamp", 0), reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (
            Commit,
            CommitAuthor,
            Group,
            GroupLink,
            GroupResolution,
            GroupStatus,
            PullRequest,
            ReleaseCommit,
            ReleaseHeadCommit,
            Repository,
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs

        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c
            for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get("message", ""))
        ]
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        if lock.locked():
            # Signal failure to the consumer rapidly. This aims to prevent the number
            # of timeouts and prevent web worker exhaustion when customers create
            # the same release rapidly for different projects.
            raise ReleaseCommitError
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with atomic_transaction(
                using=(
                    router.db_for_write(type(self)),
                    router.db_for_write(ReleaseCommit),
                    router.db_for_write(Repository),
                    router.db_for_write(CommitAuthor),
                    router.db_for_write(Commit),
                )
            ):
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get("repository") or f"organization-{self.organization_id}"
                    if repo_name not in repos:
                        repos[repo_name] = repo = Repository.objects.get_or_create(
                            organization_id=self.organization_id, name=repo_name
                        )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get("author_email")
                    # synthesize an email from the author name when missing
                    if author_email is None and data.get("author_name"):
                        author_email = (
                            re.sub(r"[^a-zA-Z0-9\-_\.]*", "", data["author_name"]).lower()
                            + "@localhost"
                        )

                    author_email = truncatechars(author_email, 75)

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {"name": data.get("author_name")}
                        author, created = CommitAuthor.objects.get_or_create(
                            organization_id=self.organization_id,
                            email=author_email,
                            defaults=author_data,
                        )
                        if author.name != author_data["name"]:
                            author.update(name=author_data["name"])
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data["author"] = author
                    if "message" in data:
                        commit_data["message"] = data["message"]
                    if "timestamp" in data:
                        commit_data["date_added"] = data["timestamp"]

                    commit, created = Commit.objects.get_or_create(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data["id"],
                        defaults=commit_data,
                    )
                    if not created:
                        # only write back fields that actually changed
                        commit_data = {
                            key: value
                            for key, value in commit_data.items()
                            if getattr(commit, key) != value
                        }
                        if commit_data:
                            commit.update(**commit_data)

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    # Guard against patch_set being None
                    patch_set = data.get("patch_set") or []
                    for patched_file in patch_set:
                        try:
                            with atomic_transaction(using=router.db_for_write(CommitFileChange)):
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file["path"],
                                    type=patched_file["type"],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with atomic_transaction(using=router.db_for_write(ReleaseCommit)):
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                # materialize commit stats onto the release row
                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        str(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self, commit__author_id__isnull=False
                        )
                        .values_list("commit__author_id", flat=True)
                        .distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing("release.set_commits.duration", time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in head_commit_by_repo.items():
            try:
                with atomic_transaction(using=router.db_for_write(ReleaseHeadCommit)):
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(release=self)
            .select_related("commit")
            .values("commit_id", "commit__key")
        )

        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc["commit_id"] for rc in release_commits],
            ).values_list("group_id", "linked_id")
        )

        commit_group_authors = [
            (cr[0], commit_author_by_commit.get(cr[1])) for cr in commit_resolutions  # group_id
        ]

        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[rc["commit__key"] for rc in release_commits],
                organization_id=self.organization_id,
            ).values_list("id", flat=True)
        )

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list("group_id", "linked_id")
        )

        pr_authors = list(
            PullRequest.objects.filter(
                id__in=[prr[1] for prr in pull_request_resolutions]
            ).select_related("author")
        )

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [
            (prr[0], pr_authors_dict.get(prr[1])) for prr in pull_request_resolutions
        ]

        user_by_author = {None: None}

        commits_and_prs = list(itertools.chain(commit_group_authors, pull_request_group_authors))

        group_project_lookup = dict(
            Group.objects.filter(id__in=[group_id for group_id, _ in commits_and_prs]).values_list(
                "id", "project_id"
            )
        )

        # resolve every group linked to one of these commits/PRs in this release
        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with atomic_transaction(
                using=(
                    router.db_for_write(GroupResolution),
                    router.db_for_write(Group),
                    # inside the remove_group_from_inbox
                    router.db_for_write(GroupInbox),
                    router.db_for_write(Activity),
                )
            ):
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        "release": self,
                        "type": GroupResolution.Type.in_release,
                        "status": GroupResolution.Status.resolved,
                        "actor_id": actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                remove_group_from_inbox(group, action=GroupInboxRemoveAction.RESOLVED, user=actor)
                metrics.incr("group.resolved", instance="in_commit", skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type="with_commit",
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={"project_id": group_project_lookup[group_id], "group_id": group_id}
            )

    def safe_delete(self):
        """Deletes a release if possible or raises a `UnsafeReleaseDeletion`
        exception.
        """
        from sentry.models import Group, ReleaseFile
        from sentry.snuba.sessions import check_has_health_data

        # we don't want to remove the first_release metadata on the Group, and
        # while people might want to kill a release (maybe to remove files),
        # removing the release is prevented
        if Group.objects.filter(first_release=self).exists():
            raise UnsafeReleaseDeletion(ERR_RELEASE_REFERENCED)

        # We do not allow releases with health data to be deleted because
        # the upserting from snuba data would create the release again.
        # We would need to be able to delete this data from snuba which we
        # can't do yet.
        project_ids = list(self.projects.values_list("id").all())
        if check_has_health_data([(p[0], self.version) for p in project_ids]):
            raise UnsafeReleaseDeletion(ERR_RELEASE_HEALTH_DATA)

        # TODO(dcramer): this needs to happen in the queue as it could be a long
        # and expensive operation
        file_list = ReleaseFile.objects.filter(release_id=self.id).select_related("file")
        for releasefile in file_list:
            releasefile.file.delete()
            releasefile.delete()
        self.delete()

    def count_artifacts(self):
        """Sum the artifact_counts of all release files.

        An artifact count of NULL is interpreted as 1.
        """
        counts = get_artifact_counts([self.id])
        return counts.get(self.id, 0)
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.IGNORED, _('Ignored')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey('sentry.Release', null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )
        index_together = (('project', 'first_release'), )
        unique_together = (('project', 'short_id'), )

    __repr__ = sane_repr('project_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        """Backfill timestamps and normalize `message` (first line, <=255
        chars) before persisting."""
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-group', args=[self.organization.slug, self.project.slug, self.id]))

    @property
    def qualified_short_id(self):
        # e.g. "PROJECT-AB12"; None when no short_id has been assigned
        if self.short_id is not None:
            return '%s-%s' % (
                self.project.slug.upper(),
                base32_encode(self.short_id),
            )

    @property
    def event_set(self):
        from sentry.models import Event
        return Event.objects.filter(group_id=self.id)

    def is_over_resolve_age(self):
        """True when the group is older than the project's auto-resolve age
        (option 'sentry:resolve_age', in hours)."""
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Effective status: IGNORED with an expired snooze becomes
        UNRESOLVED; UNRESOLVED past the auto-resolve age becomes RESOLVED."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status

        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        """Return the public share UUID for this group, or None."""
        from sentry.models import GroupShare
        try:
            return GroupShare.objects.filter(group_id=self.id, ).values_list(
                'uuid', flat=True)[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    @classmethod
    def from_share_id(cls, share_id):
        """Resolve a 32-char share UUID back to its Group.

        Raises Group.DoesNotExist for malformed or unknown ids.
        """
        if not share_id or len(share_id) != 32:
            raise cls.DoesNotExist

        from sentry.models import GroupShare
        return cls.objects.get(id=GroupShare.objects.filter(
            uuid=share_id,
        ).values_list('group_id'), )

    def get_score(self):
        # heuristic ranking score mixing frequency and recency
        return int(
            math.log(self.times_seen) * 600 + float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        """Most recent event of this group, memoized on the instance (or None)."""
        from sentry.models import Event

        if not hasattr(self, '_latest_event'):
            # fetch a small window and re-sort; DB datetime ordering alone is
            # not the final ordering (see EVENT_ORDERING_KEY)
            latest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('-datetime')[0:5],
                key=EVENT_ORDERING_KEY,
                reverse=True,
            )
            try:
                self._latest_event = latest_events[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_oldest_event(self):
        """Oldest event of this group, memoized on the instance (or None)."""
        from sentry.models import Event

        if not hasattr(self, '_oldest_event'):
            oldest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('datetime')[0:5],
                key=EVENT_ORDERING_KEY,
            )
            try:
                self._oldest_event = oldest_events[0]
            except IndexError:
                self._oldest_event = None
        return self._oldest_event

    def get_tags(self):
        """Tag keys seen on this group with display labels, sorted by label;
        memoized on the instance."""
        if not hasattr(self, '_tag_cache'):
            group_tags = set([
                gtk.key for gtk in tagstore.get_group_tag_keys(
                    self.project_id, self.id, environment_id=None)
            ])

            results = []
            for key in group_tags:
                results.append({
                    'key': key,
                    'label': tagstore.get_tag_key_label(key),
                })

            self._tag_cache = sorted(results, key=lambda x: x['label'])

        return self._tag_cache

    def get_first_release(self):
        # fall back to tagstore when the FK is unset
        if self.first_release_id is None:
            return tagstore.get_first_release(self.id)
        return self.first_release.version

    def get_last_release(self):
        return tagstore.get_last_release(self.id)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get('type', 'default')

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        etype = self.data.get('type')
        if etype is None:
            etype = 'default'
        if 'metadata' not in self.data:
            # legacy rows: derive metadata on the fly from the message
            data = self.data.copy() if self.data else {}
            data['message'] = self.message
            return eventtypes.get(etype)(data).get_metadata()
        return self.data['metadata']

    @property
    def title(self):
        et = eventtypes.get(self.get_event_type())(self.data)
        return et.to_string(self.get_event_metadata())

    def error(self):
        warnings.warn('Group.error is deprecated, use Group.title', DeprecationWarning)
        return self.title

    error.short_description = _('error')

    @property
    def message_short(self):
        warnings.warn('Group.message_short is deprecated, use Group.title', DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '[%s] %s: %s' % (self.project.get_full_name().encode('utf-8'),
                                six.text_type(self.get_level_display()).upper(
        ).encode('utf-8'), self.title.encode('utf-8'))

    def count_users_seen(self):
        """Number of distinct users that have seen this group (via tagstore)."""
        return tagstore.get_group_values_seen(self.id, environment_id=None,
                                              key='sentry:user')[self.id]
class GroupResolution(Model):
    """
    Records the point at which a group was marked as resolved, and the
    release that resolution was made against.
    """
    __core__ = False

    class Type:
        in_release = 0
        in_next_release = 1

    class Status:
        pending = 0
        resolved = 1

    group = FlexibleForeignKey('sentry.Group', unique=True)
    # the release in which its suggested this was resolved
    # which allows us to indicate if it still happens in newer versions
    release = FlexibleForeignKey('sentry.Release')
    type = BoundedPositiveIntegerField(choices=(
        (Type.in_next_release, 'in_next_release'),
        (Type.in_release, 'in_release'),
    ), null=True)
    actor_id = BoundedPositiveIntegerField(null=True)
    datetime = models.DateTimeField(default=timezone.now, db_index=True)
    status = BoundedPositiveIntegerField(
        default=Status.pending,
        choices=(
            (Status.pending, _('Pending')),
            (Status.resolved, _('Resolved')),
        ),
    )

    class Meta:
        db_table = 'sentry_groupresolution'
        app_label = 'sentry'

    __repr__ = sane_repr('group_id', 'release_id')

    @classmethod
    def has_resolution(cls, group, release):
        """
        Determine if a resolution exists for the given group and release.

        This is used to suggest if a regression has occurred.
        """
        rows = cls.objects.filter(
            group=group,
        ).select_related('release').values_list(
            'type',
            'release__id',
            'release__date_added',
        )
        try:
            res_kind, resolved_in_id, resolved_in_date = rows[0]
        except IndexError:
            # no resolution recorded at all
            return False

        # With no release given, we assume we've gone from "no release" to
        # "some release" in application configuration, so the resolution
        # must be older.
        if not release:
            return True

        if res_kind in (None, cls.Type.in_next_release):
            # resolved "in the next release": still resolved if we're looking
            # at the same release, or at one older than the resolution point
            return resolved_in_id == release.id or resolved_in_date > release.date_added

        if res_kind == cls.Type.in_release:
            # resolved "in this release": only counts for releases other than
            # the resolving one that are not newer than it
            return resolved_in_id != release.id and resolved_in_date >= release.date_added

        raise NotImplementedError
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    Releases are organization-scoped (unique per (organization, version)) and
    attached to projects through the ReleaseProject m2m table.
    """
    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    projects = models.ManyToManyField(
        "sentry.Project", related_name="releases", through=ReleaseProject
    )
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    data = JSONField(default={})
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey("sentry.User", null=True, blank=True, on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True, default=0)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True, default=0)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_release"
        unique_together = (("organization", "version"),)

    __repr__ = sane_repr("organization_id", "version")

    @staticmethod
    def is_valid_version(value):
        """Reject empty versions, path-like names, bad characters and 'latest'."""
        return not (
            any(c in value for c in BAD_RELEASE_CHARS)
            or value in (".", "..")
            or not value
            or value.lower() == "latest"
        )

    @classmethod
    def get_cache_key(cls, organization_id, version):
        # version is hashed so arbitrary-length versions fit a fixed-size key
        return "release:3:%s:%s" % (organization_id, md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        return u"releasecommits:{}:{}".format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """Cached lookup of a release by project + version; None if missing.

        A cache value of -1 is a negative-lookup sentinel (cached 300s).
        """
        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id, projects=project, version=version
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)

        if release == -1:
            return

        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """Fetch or create the release for (project.organization, version).

        Also honors legacy '<slug>-<version>' style versions and ensures the
        project is linked to the release and flagged as having releases.
        """
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ("%s-%s" % (project.slug, version))[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(
                    organization_id=project.organization_id,
                    version__in=[version, project_version],
                    projects=project,
                )
            )
            if releases:
                # prefer the legacy project-prefixed version when both exist
                try:
                    release = [r for r in releases if r.version == project_version][0]
                except IndexError:
                    release = releases[0]
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )
                except IntegrityError:
                    # lost a create race; another writer made it first
                    release = cls.objects.get(
                        organization_id=project.organization_id, version=version
                    )
                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F("flags").bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)

        return release

    @classmethod
    def merge(cls, to_release, from_releases):
        """Re-point all rows referencing each of ``from_releases`` to
        ``to_release``, then delete the merged-away releases.

        Per-row fallback handles unique-constraint clashes: conflicting rows
        are deleted rather than moved.
        """
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            Group,
            GroupRelease,
            GroupResolution,
        )

        model_list = (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            GroupRelease,
            GroupResolution,
        )
        for release in from_releases:
            for model in model_list:
                # some of these models use a FK, others a raw release_id column
                if hasattr(model, "release"):
                    update_kwargs = {"release": to_release}
                else:
                    update_kwargs = {"release_id": to_release.id}
                try:
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(**update_kwargs)
                except IntegrityError:
                    # bulk move collided with a unique constraint; retry row
                    # by row and drop the duplicates
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(**update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(first_release=to_release)

            release.delete()

    def add_dist(self, name, date_added=None):
        """Get or create a Distribution (e.g. build variant) for this release."""
        from sentry.models import Distribution

        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={"date_added": date_added, "organization_id": self.organization_id},
        )[0]

    def get_dist(self, name):
        # returns None when the distribution does not exist
        from sentry.models import Distribution

        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project

        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F("flags").bitor(Project.flags.has_releases))
        except IntegrityError:
            # link already existed
            return False
        else:
            return True

    def handle_commit_ranges(self, refs):
        """
        Takes commit refs of the form:
        [
            {
                'previousCommit': None,
                'commit': 'previous_commit..commit',
            }
        ]
        Note: Overwrites 'previousCommit' and 'commit'
        """
        for ref in refs:
            if COMMIT_RANGE_DELIMITER in ref["commit"]:
                ref["previousCommit"], ref["commit"] = ref["commit"].split(COMMIT_RANGE_DELIMITER)

    def set_refs(self, refs, user, fetch=False):
        """Record head commits per repository for this release.

        ``refs`` is a list of {'repository', 'commit', optional range}.
        Raises InvalidRepository for unknown repo names. When ``fetch`` is
        True, queues a background task to pull commit history starting from
        the previous release.
        """
        from sentry.api.exceptions import InvalidRepository
        from sentry.models import Commit, ReleaseHeadCommit, Repository
        from sentry.tasks.commits import fetch_commits

        # TODO: this does the wrong thing unless you are on the most
        # recent release.  Add a timestamp compare?
        prev_release = (
            type(self)
            .objects.filter(organization_id=self.organization_id, projects__in=self.projects.all())
            .extra(select={"sort": "COALESCE(date_released, date_added)"})
            .exclude(version=self.version)
            .order_by("-sort")
            .first()
        )

        names = {r["repository"] for r in refs}
        repos = list(
            Repository.objects.filter(organization_id=self.organization_id, name__in=names)
        )
        repos_by_name = {r.name: r for r in repos}
        invalid_repos = names - set(repos_by_name.keys())
        if invalid_repos:
            raise InvalidRepository("Invalid repository names: %s" % ",".join(invalid_repos))

        # splits 'a..b' style refs into previousCommit/commit pairs in place
        self.handle_commit_ranges(refs)

        for ref in refs:
            repo = repos_by_name[ref["repository"]]
            commit = Commit.objects.get_or_create(
                organization_id=self.organization_id, repository_id=repo.id, key=ref["commit"]
            )[0]
            # update head commit for repo/release if exists
            ReleaseHeadCommit.objects.create_or_update(
                organization_id=self.organization_id,
                repository_id=repo.id,
                release=self,
                values={"commit": commit},
            )
        if fetch:
            fetch_commits.apply_async(
                kwargs={
                    "release_id": self.id,
                    "user_id": user.id,
                    "refs": refs,
                    "prev_release_id": prev_release and prev_release.id,
                }
            )

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.

        Side effects beyond the commit log itself: creates missing
        Repository/CommitAuthor/Commit rows, records file changes, updates
        materialized stats on the release, fills ReleaseHeadCommit, emits
        signals, and auto-resolves groups linked to these commits/PRs.
        """
        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get("timestamp"), reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        # NOTE(review): CommitFileChange is used below but is not in this
        # import list — presumably imported at module level; verify, otherwise
        # the patch_set loop raises NameError.
        from sentry.models import (
            Commit,
            CommitAuthor,
            Group,
            GroupLink,
            GroupResolution,
            GroupStatus,
            ReleaseCommit,
            ReleaseHeadCommit,
            Repository,
            PullRequest,
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs

        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c
            for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get("message", ""))
        ]
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        # serialize concurrent set_commits calls for the same release
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                initial_commit_ids = set(
                    ReleaseCommit.objects.filter(release=self).values_list("commit_id", flat=True)
                )
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    # commits without a repository get a synthetic org-wide one
                    repo_name = data.get("repository") or u"organization-{}".format(
                        self.organization_id
                    )
                    if repo_name not in repos:
                        repos[repo_name] = repo = Repository.objects.get_or_create(
                            organization_id=self.organization_id, name=repo_name
                        )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get("author_email")
                    # derive a placeholder email from the author name if needed
                    if author_email is None and data.get("author_name"):
                        author_email = (
                            re.sub(r"[^a-zA-Z0-9\-_\.]*", "", data["author_name"]).lower()
                            + "@localhost"
                        )

                    # CommitAuthor.email column limit
                    author_email = truncatechars(author_email, 75)

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {"name": data.get("author_name")}
                        author, created = CommitAuthor.objects.get_or_create(
                            organization_id=self.organization_id,
                            email=author_email,
                            defaults=author_data,
                        )
                        if author.name != author_data["name"]:
                            author.update(name=author_data["name"])
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}
                    defaults = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data["author"] = author
                    if "message" in data:
                        commit_data["message"] = data["message"]
                    if "timestamp" in data:
                        commit_data["date_added"] = data["timestamp"]
                    else:
                        defaults["date_added"] = timezone.now()

                    commit, created = Commit.objects.create_or_update(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data["id"],
                        defaults=defaults,
                        values=commit_data,
                    )
                    if not created:
                        # create_or_update returned an update count; re-fetch
                        # the actual row
                        commit = Commit.objects.get(
                            organization_id=self.organization_id,
                            repository_id=repo.id,
                            key=data["id"],
                        )

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get("patch_set", [])
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file["path"],
                                    type=patched_file["type"],
                                )
                        except IntegrityError:
                            # file change already recorded
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    # commit_list is newest-first, so the first one is latest
                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                # materialized stats on the release row
                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self, commit__author_id__isnull=False
                        )
                        .values_list("commit__author_id", flat=True)
                        .distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
            metrics.timing("release.set_commits.duration", time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(release=self)
            .select_related("commit")
            .values("commit_id", "commit__key")
        )

        # notify listeners about the commit-set delta
        final_commit_ids = set(rc["commit_id"] for rc in release_commits)
        removed_commit_ids = initial_commit_ids - final_commit_ids
        added_commit_ids = final_commit_ids - initial_commit_ids
        if removed_commit_ids or added_commit_ids:
            release_commits_updated.send_robust(
                release=self,
                removed_commit_ids=removed_commit_ids,
                added_commit_ids=added_commit_ids,
                sender=self.__class__,
            )

        # groups linked to one of these commits via "fixes XXX" annotations
        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc["commit_id"] for rc in release_commits],
            ).values_list("group_id", "linked_id")
        )

        commit_group_authors = [
            (cr[0], commit_author_by_commit.get(cr[1])) for cr in commit_resolutions  # group_id
        ]

        # pull requests whose merge commit landed in this release
        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[rc["commit__key"] for rc in release_commits],
                organization_id=self.organization_id,
            ).values_list("id", flat=True)
        )

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list("group_id", "linked_id")
        )

        pr_authors = list(
            PullRequest.objects.filter(
                id__in=[prr[1] for prr in pull_request_resolutions]
            ).select_related("author")
        )

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [
            (prr[0], pr_authors_dict.get(prr[1])) for prr in pull_request_resolutions
        ]

        # maps CommitAuthor -> resolved Sentry user (memoized; None allowed)
        user_by_author = {None: None}

        commits_and_prs = list(itertools.chain(commit_group_authors, pull_request_group_authors))

        group_project_lookup = dict(
            Group.objects.filter(id__in=[group_id for group_id, _ in commits_and_prs]).values_list(
                "id", "project_id"
            )
        )

        # auto-resolve each linked group "in this release"
        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        "release": self,
                        "type": GroupResolution.Type.in_release,
                        "status": GroupResolution.Status.resolved,
                        "actor_id": actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                metrics.incr("group.resolved", instance="in_commit", skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type="with_commit",
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={"project_id": group_project_lookup[group_id], "group_id": group_id}
            )
class OrganizationMember(Model):
    """
    Identifies relationships between organizations and users.

    A member is either a concrete user (``user``) or a pending invite
    (``email`` only, ``user`` null). Team access is granted through
    OrganizationMemberTeam rows, or globally when the member's role
    is a global one.
    """
    __core__ = True

    organization = FlexibleForeignKey('sentry.Organization', related_name="member_set")

    # null while the membership is a pending email invite
    user = FlexibleForeignKey(
        settings.AUTH_USER_MODEL, null=True, blank=True, related_name="sentry_orgmember_set"
    )
    email = models.EmailField(null=True, blank=True)
    role = models.CharField(
        choices=roles.get_choices(),
        max_length=32,
        default=roles.get_default().id,
    )
    flags = BitField(
        flags=(('sso:linked', 'sso:linked'), ('sso:invalid', 'sso:invalid'), ), default=0
    )
    # invite token; legacy invites fall back to the derived ``legacy_token``
    token = models.CharField(max_length=64, null=True, blank=True, unique=True)
    date_added = models.DateTimeField(default=timezone.now)
    has_global_access = models.BooleanField(default=True)
    teams = models.ManyToManyField(
        'sentry.Team', blank=True, through='sentry.OrganizationMemberTeam'
    )

    # Deprecated -- no longer used
    type = BoundedPositiveIntegerField(default=50, blank=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organizationmember'
        unique_together = (('organization', 'user'), ('organization', 'email'), )

    __repr__ = sane_repr(
        'organization_id',
        'user_id',
        'role',
    )

    @transaction.atomic
    def save(self, *args, **kwargs):
        # a membership must identify somebody: a user or an invite email
        assert self.user_id or self.email, \
            'Must set user or email'
        super(OrganizationMember, self).save(*args, **kwargs)

    @property
    def is_pending(self):
        # pending == invite not yet accepted (no user attached)
        return self.user_id is None

    @property
    def legacy_token(self):
        """Deterministic invite token derived from org, email and SECRET_KEY.

        Used for invites created before ``token`` was stored on the row.
        """
        checksum = md5()
        checksum.update(six.text_type(self.organization_id).encode('utf-8'))
        checksum.update(self.get_email().encode('utf-8'))
        checksum.update(force_bytes(settings.SECRET_KEY))
        return checksum.hexdigest()

    def generate_token(self):
        # 64 hex chars of randomness for the invite link
        return uuid4().hex + uuid4().hex

    def get_invite_link(self):
        # only pending invites have an accept link
        if not self.is_pending:
            return None
        return absolute_uri(
            reverse(
                'sentry-accept-invite',
                kwargs={
                    'member_id': self.id,
                    'token': self.token or self.legacy_token,
                }
            )
        )

    def send_invite_email(self):
        """Send the invite email; failures are logged, never raised."""
        from sentry.utils.email import MessageBuilder

        context = {
            'email': self.email,
            'organization': self.organization,
            'url': self.get_invite_link(),
        }

        msg = MessageBuilder(
            subject='Join %s in using Sentry' % self.organization.name,
            template='sentry/emails/member-invite.txt',
            html_template='sentry/emails/member-invite.html',
            type='organization.invite',
            context=context,
        )

        try:
            msg.send_async([self.get_email()])
        except Exception as e:
            # best-effort: an invite email failure must not break the caller
            logger = get_logger(name='sentry.mail')
            logger.exception(e)

    def send_sso_link_email(self, configurer, provider):
        """Ask the member to link their account to the org's SSO provider."""
        from sentry.utils.email import MessageBuilder

        link_args = {'organization_slug': self.organization.slug}

        context = {
            'organization': self.organization,
            'configurer': configurer,
            'provider': provider,
            'url': absolute_uri(reverse('sentry-auth-organization', kwargs=link_args)),
        }

        msg = MessageBuilder(
            subject='Action Required for %s' % (self.organization.name, ),
            template='sentry/emails/auth-link-identity.txt',
            html_template='sentry/emails/auth-link-identity.html',
            type='organization.auth_link',
            context=context,
        )
        msg.send_async([self.get_email()])

    def get_display_name(self):
        if self.user_id:
            return self.user.get_display_name()
        return self.email

    def get_label(self):
        # may return the row id when a pending invite has no email set
        if self.user_id:
            return self.user.get_label()
        return self.email or self.id

    def get_email(self):
        # the user's current email wins over the (possibly stale) invite email
        if self.user_id:
            return self.user.email
        return self.email

    def get_avatar_type(self):
        if self.user_id:
            return self.user.get_avatar_type()
        return 'letter_avatar'

    def get_audit_log_data(self):
        # NOTE(review): 'teams' holds Team model instances, not ids — confirm
        # the audit log serializer expects that.
        from sentry.models import Team
        return {
            'email': self.email,
            'user': self.user_id,
            'teams': list(
                Team.objects.filter(
                    id__in=OrganizationMemberTeam.objects.filter(
                        organizationmember=self,
                        is_active=True,
                    ).values_list('team', flat=True)
                )
            ),
            'has_global_access': self.has_global_access,
            'role': self.role,
        }

    def get_teams(self):
        """Teams this member can access: all org teams for global roles,
        otherwise only active OrganizationMemberTeam assignments."""
        from sentry.models import Team

        if roles.get(self.role).is_global:
            return self.organization.team_set.all()

        return Team.objects.filter(
            id__in=OrganizationMemberTeam.objects.filter(
                organizationmember=self,
                is_active=True,
            ).values('team')
        )

    def get_scopes(self):
        # API scopes come solely from the member's role
        return roles.get(self.role).scopes
class OrganizationOnboardingTask(Model):
    """
    Onboarding tasks walk new Sentry orgs through basic features of Sentry.

    One row per (organization, task); ``status`` tracks whether the task is
    complete, pending, or was explicitly skipped by the user.
    """
    __core__ = False

    # (task id constant, API string key)
    TASK_CHOICES = (
        (OnboardingTask.FIRST_PROJECT, "create_project"),
        (OnboardingTask.FIRST_EVENT, "send_first_event"),
        (OnboardingTask.INVITE_MEMBER, "invite_member"),
        (OnboardingTask.SECOND_PLATFORM, "setup_second_platform"),
        (OnboardingTask.USER_CONTEXT, "setup_user_context"),
        (OnboardingTask.RELEASE_TRACKING, "setup_release_tracking"),
        (OnboardingTask.SOURCEMAPS, "setup_sourcemaps"),
        (OnboardingTask.USER_REPORTS, "setup_user_reports"),
        (OnboardingTask.ISSUE_TRACKER, "setup_issue_tracker"),
        (OnboardingTask.ALERT_RULE, "setup_alert_rules"),
    )

    STATUS_CHOICES = (
        (OnboardingTaskStatus.COMPLETE, "complete"),
        (OnboardingTaskStatus.PENDING, "pending"),
        (OnboardingTaskStatus.SKIPPED, "skipped"),
    )

    # Used in the API to map IDs to string keys. This keeps things
    # a bit more maintainable on the frontend.
    TASK_KEY_MAP = dict(TASK_CHOICES)
    TASK_LOOKUP_BY_KEY = {v: k for k, v in TASK_CHOICES}
    STATUS_KEY_MAP = dict(STATUS_CHOICES)
    STATUS_LOOKUP_BY_KEY = {v: k for k, v in STATUS_CHOICES}

    # Tasks which must be completed for the onboarding to be considered
    # complete.
    REQUIRED_ONBOARDING_TASKS = frozenset([
        OnboardingTask.FIRST_PROJECT,
        OnboardingTask.FIRST_EVENT,
        OnboardingTask.INVITE_MEMBER,
        OnboardingTask.SECOND_PLATFORM,
        OnboardingTask.USER_CONTEXT,
        OnboardingTask.RELEASE_TRACKING,
        OnboardingTask.SOURCEMAPS,
        OnboardingTask.ISSUE_TRACKER,
        OnboardingTask.ALERT_RULE,
    ])

    # Tasks the user may mark as skipped (note: includes USER_REPORTS, which
    # is not in the required set above).
    SKIPPABLE_TASKS = frozenset([
        OnboardingTask.INVITE_MEMBER,
        OnboardingTask.SECOND_PLATFORM,
        OnboardingTask.USER_CONTEXT,
        OnboardingTask.RELEASE_TRACKING,
        OnboardingTask.SOURCEMAPS,
        OnboardingTask.USER_REPORTS,
        OnboardingTask.ISSUE_TRACKER,
        OnboardingTask.ALERT_RULE,
    ])

    organization = FlexibleForeignKey("sentry.Organization")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True)  # user that completed
    task = BoundedPositiveIntegerField(choices=TASK_CHOICES)
    status = BoundedPositiveIntegerField(choices=STATUS_CHOICES)
    date_completed = models.DateTimeField(default=timezone.now)
    # project the task was completed in, if task-relevant (not a FK)
    project_id = BoundedBigIntegerField(blank=True, null=True)
    data = JSONField()  # INVITE_MEMBER { invited_member: user.id }

    objects = OrganizationOnboardingTaskManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_organizationonboardingtask"
        unique_together = (("organization", "task"), )

    __repr__ = sane_repr("organization", "task")
class SentryApp(ParanoidModel, HasApiScopes):
    """
    A third-party integration ("Sentry App") registered on the platform.

    Owned by an Organization; installable into others. ParanoidModel gives
    soft-deletion; HasApiScopes supplies the OAuth scope fields.
    """
    __core__ = True

    application = models.OneToOneField(
        'sentry.ApiApplication',
        null=True,
        on_delete=models.SET_NULL,
        related_name='sentry_app',
    )

    # Much of the OAuth system in place currently depends on a User existing.
    # This "proxy user" represents the SentryApp in those cases.
    proxy_user = models.OneToOneField('sentry.User', null=True, on_delete=models.SET_NULL,
                                      related_name='sentry_app')

    # The Organization the Sentry App was created in "owns" it. Members of that
    # Org have differing access, dependent on their role within the Org.
    owner = FlexibleForeignKey('sentry.Organization', related_name='owned_sentry_apps')

    name = models.TextField()
    # derived from ``name`` on first save; see _set_slug()
    slug = models.CharField(max_length=SENTRY_APP_SLUG_MAX_LENGTH, unique=True)
    status = BoundedPositiveIntegerField(
        default=SentryAppStatus.UNPUBLISHED,
        choices=SentryAppStatus.as_choices(),
        db_index=True,
    )
    uuid = models.CharField(max_length=64, default=default_uuid)

    redirect_url = models.URLField(null=True)
    webhook_url = models.URLField()

    overview = models.TextField(null=True)

    date_added = models.DateTimeField(default=timezone.now)
    date_updated = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_sentryapp'

    @property
    def organizations(self):
        """Organizations this app is installed in (empty queryset if unsaved)."""
        if not self.pk:
            return Organization.objects.none()

        # NOTE(review): select_related() targets a reverse relation here,
        # which Django does not support — presumably a silent no-op; confirm
        # whether prefetch_related was intended.
        return Organization \
            .objects \
            .select_related('sentry_app_installations') \
            .filter(sentry_app_installations__sentry_app_id=self.id)

    @property
    def teams(self):
        """Teams across all organizations the app is installed in."""
        from sentry.models import Team

        if not self.pk:
            return Team.objects.none()

        return Team.objects.filter(organization__in=self.organizations)

    def save(self, *args, **kwargs):
        # derive the slug before the unique constraint is checked
        self._set_slug()
        return super(SentryApp, self).save(*args, **kwargs)

    def is_installed_on(self, organization):
        return self.organizations.filter(pk=organization.pk).exists()

    def _set_slug(self):
        """
        Matches ``name``, but in lowercase, dash form. Only fills the slug
        when it is not already set; existing slugs are never overwritten.

        >>> self.name = 'My Cool App'
        >>> self._set_slug()
        >>> self.slug
        'my-cool-app'
        """
        if not self.slug:
            self.slug = slugify(self.name)
class Alert(Model):
    """A point-in-time alert for a project, optionally scoped to a group."""
    project = FlexibleForeignKey('sentry.Project')
    group = FlexibleForeignKey('sentry.Group', null=True)
    datetime = models.DateTimeField(default=timezone.now)
    message = models.TextField()
    data = GzippedDictField(null=True)
    related_groups = models.ManyToManyField(
        'sentry.Group', through='sentry.AlertRelatedGroup', related_name='related_alerts')
    status = BoundedPositiveIntegerField(default=0, choices=(
        (AlertStatus.UNRESOLVED, _('Unresolved')),
        (AlertStatus.RESOLVED, _('Resolved')),
    ), db_index=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_alert'

    __repr__ = sane_repr('project_id', 'group_id', 'datetime')

    # TODO: move classmethods to manager
    @classmethod
    def get_recent_for_project(cls, project_id):
        """Unresolved project-wide alerts from the past hour, newest first."""
        one_hour_ago = timezone.now() - timedelta(minutes=60)
        recent = cls.objects.filter(
            project=project_id,
            group_id__isnull=True,
            datetime__gte=one_hour_ago,
            status=AlertStatus.UNRESOLVED,
        )
        return recent.order_by('-datetime')

    @classmethod
    def maybe_alert(cls, project_id, message, group_id=None):
        """Create an alert unless a matching one exists within the last hour.

        Returns the newly created ``Alert``, or ``None`` when deduplicated.
        """
        current_time = timezone.now()
        queryset = cls.objects

        # Dedupe window: any alert for this project (and, when provided, this
        # specific group) created in the past hour suppresses a new one.
        # TODO: there is a race condition if we're calling this function for the same project
        lookup = dict(
            project_id=project_id,
            datetime__gte=current_time - timedelta(minutes=60),
        )
        if group_id:
            lookup['group'] = group_id
        if queryset.filter(**lookup).exists():
            return None

        return queryset.create(
            project_id=project_id,
            group_id=group_id,
            datetime=current_time,
            message=message,
        )

    @property
    def team(self):
        return self.project.team

    @property
    def organization(self):
        return self.project.organization

    @property
    def is_resolved(self):
        """Resolved explicitly, or implicitly once older than one hour."""
        if self.status == AlertStatus.RESOLVED:
            return True
        return self.datetime < timezone.now() - timedelta(minutes=60)

    def get_absolute_url(self):
        path_args = [self.organization.slug, self.project.slug, self.id]
        return absolute_uri(reverse('sentry-alert-details', args=path_args))
class Activity(Model):
    """
    An audit-trail entry recording something that happened to a project or
    group: resolution changes, assignments, notes, releases, deploys, etc.

    ``type`` selects the kind of event; ``data`` carries type-specific
    payload (nullable).
    """
    __core__ = False

    # Activity type constants, persisted in ``type``.
    SET_RESOLVED = 1
    SET_UNRESOLVED = 2
    SET_IGNORED = 3
    SET_PUBLIC = 4
    SET_PRIVATE = 5
    SET_REGRESSION = 6
    CREATE_ISSUE = 7
    NOTE = 8
    FIRST_SEEN = 9
    RELEASE = 10
    ASSIGNED = 11
    UNASSIGNED = 12
    SET_RESOLVED_IN_RELEASE = 13
    MERGE = 14
    SET_RESOLVED_BY_AGE = 15
    SET_RESOLVED_IN_COMMIT = 16
    DEPLOY = 17
    NEW_PROCESSING_ISSUES = 18
    UNMERGE_SOURCE = 19
    UNMERGE_DESTINATION = 20

    TYPE = (
        # (TYPE, verb-slug)
        (SET_RESOLVED, 'set_resolved'),
        (SET_RESOLVED_BY_AGE, 'set_resolved_by_age'),
        (SET_RESOLVED_IN_RELEASE, 'set_resolved_in_release'),
        (SET_RESOLVED_IN_COMMIT, 'set_resolved_in_commit'),
        (SET_UNRESOLVED, 'set_unresolved'),
        (SET_IGNORED, 'set_ignored'),
        (SET_PUBLIC, 'set_public'),
        (SET_PRIVATE, 'set_private'),
        (SET_REGRESSION, 'set_regression'),
        (CREATE_ISSUE, 'create_issue'),
        (NOTE, 'note'),
        (FIRST_SEEN, 'first_seen'),
        (RELEASE, 'release'),
        (ASSIGNED, 'assigned'),
        (UNASSIGNED, 'unassigned'),
        (MERGE, 'merge'),
        (DEPLOY, 'deploy'),
        (NEW_PROCESSING_ISSUES, 'new_processing_issues'),
        (UNMERGE_SOURCE, 'unmerge_source'),
        (UNMERGE_DESTINATION, 'unmerge_destination'),
    )

    project = FlexibleForeignKey('sentry.Project')
    group = FlexibleForeignKey('sentry.Group', null=True)
    # index on (type, ident)
    type = BoundedPositiveIntegerField(choices=TYPE)
    ident = models.CharField(max_length=64, null=True)
    # if the user is not set, it's assumed to be the system
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True)
    datetime = models.DateTimeField(default=timezone.now)
    data = GzippedDictField(null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_activity'

    # NOTE(review): 'event_id' is listed here but no ``event`` field is
    # visible on this model — confirm sane_repr tolerates missing attributes.
    __repr__ = sane_repr('project_id', 'group_id', 'event_id', 'user_id', 'type', 'ident')

    def __init__(self, *args, **kwargs):
        super(Activity, self).__init__(*args, **kwargs)
        from sentry.models import Release

        # XXX(dcramer): fix for bad data -- historical rows may store a
        # Release instance where a version string is expected. ``data`` is
        # nullable (GzippedDictField(null=True)), so guard against None and
        # missing keys instead of raising TypeError/KeyError on load.
        if self.type in (self.RELEASE, self.DEPLOY) and self.data and isinstance(
                self.data.get('version'), Release):
            self.data['version'] = self.data['version'].version
        if self.type == self.ASSIGNED and self.data and 'assignee' in self.data:
            self.data['assignee'] = six.text_type(self.data['assignee'])

    def save(self, *args, **kwargs):
        created = bool(not self.id)

        super(Activity, self).save(*args, **kwargs)

        # only bump counters for freshly created rows, not updates
        if not created:
            return

        # HACK: support Group.num_comments
        if self.type == Activity.NOTE:
            self.group.update(num_comments=F('num_comments') + 1)

    def delete(self, *args, **kwargs):
        super(Activity, self).delete(*args, **kwargs)

        # HACK: support Group.num_comments
        if self.type == Activity.NOTE:
            self.group.update(num_comments=F('num_comments') - 1)

    def send_notification(self):
        # fan out asynchronously; only the id crosses the task boundary
        activity.send_activity_notifications.delay(self.id)
class Release(Model): """ A release is generally created when a new version is pushed into a production state. """ __core__ = False organization = FlexibleForeignKey('sentry.Organization') projects = models.ManyToManyField( 'sentry.Project', related_name='releases', through=ReleaseProject ) # DEPRECATED project_id = BoundedPositiveIntegerField(null=True) version = models.CharField(max_length=64) # ref might be the branch name being released ref = models.CharField(max_length=64, null=True, blank=True) url = models.URLField(null=True, blank=True) date_added = models.DateTimeField(default=timezone.now) # DEPRECATED - not available in UI or editable from API date_started = models.DateTimeField(null=True, blank=True) date_released = models.DateTimeField(null=True, blank=True) # arbitrary data recorded with the release data = JSONField(default={}) new_groups = BoundedPositiveIntegerField(default=0) # generally the release manager, or the person initiating the process owner = FlexibleForeignKey('sentry.User', null=True, blank=True) # materialized stats commit_count = BoundedPositiveIntegerField(null=True) last_commit_id = BoundedPositiveIntegerField(null=True) authors = ArrayField(null=True) total_deploys = BoundedPositiveIntegerField(null=True) last_deploy_id = BoundedPositiveIntegerField(null=True) class Meta: app_label = 'sentry' db_table = 'sentry_release' unique_together = (('organization', 'version'), ) __repr__ = sane_repr('organization', 'version') @staticmethod def is_valid_version(value): return not (any(c in value for c in BAD_RELEASE_CHARS) or value in ('.', '..') or not value) @classmethod def get_cache_key(cls, organization_id, version): return 'release:3:%s:%s' % (organization_id, md5_text(version).hexdigest()) @classmethod def get_lock_key(cls, organization_id, release_id): return 'releasecommits:{}:{}'.format(organization_id, release_id) @classmethod def get(cls, project, version): cache_key = cls.get_cache_key(project.organization_id, version) release = 
cache.get(cache_key) if release is None: try: release = cls.objects.get( organization_id=project.organization_id, projects=project, version=version, ) except cls.DoesNotExist: release = -1 cache.set(cache_key, release, 300) if release == -1: return return release @classmethod def get_or_create(cls, project, version, date_added=None): from sentry.models import Project if date_added is None: date_added = timezone.now() cache_key = cls.get_cache_key(project.organization_id, version) release = cache.get(cache_key) if release in (None, -1): # TODO(dcramer): if the cache result is -1 we could attempt a # default create here instead of default get project_version = ('%s-%s' % (project.slug, version))[:64] releases = list( cls.objects.filter( organization_id=project.organization_id, version__in=[version, project_version], projects=project ) ) if releases: try: release = [r for r in releases if r.version == project_version][0] except IndexError: release = releases[0] else: try: with transaction.atomic(): release = cls.objects.create( organization_id=project.organization_id, version=version, date_added=date_added, total_deploys=0, ) except IntegrityError: release = cls.objects.get( organization_id=project.organization_id, version=version ) release.add_project(project) if not project.flags.has_releases: project.flags.has_releases = True project.update(flags=F('flags').bitor(Project.flags.has_releases)) # TODO(dcramer): upon creating a new release, check if it should be # the new "latest release" for this project cache.set(cache_key, release, 3600) return release @classmethod def merge(cls, to_release, from_releases): # The following models reference release: # ReleaseCommit.release # ReleaseEnvironment.release_id # ReleaseProject.release # GroupRelease.release_id # GroupResolution.release # Group.first_release # ReleaseFile.release from sentry.models import ( ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject, Group, GroupRelease, GroupResolution ) model_list = ( 
ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject, GroupRelease, GroupResolution ) for release in from_releases: for model in model_list: if hasattr(model, 'release'): update_kwargs = {'release': to_release} else: update_kwargs = {'release_id': to_release.id} try: with transaction.atomic(): model.objects.filter(release_id=release.id).update(**update_kwargs) except IntegrityError: for item in model.objects.filter(release_id=release.id): try: with transaction.atomic(): model.objects.filter(id=item.id).update(**update_kwargs) except IntegrityError: item.delete() Group.objects.filter(first_release=release).update(first_release=to_release) release.delete() @property def short_version(self): version = self.version match = _dotted_path_prefix_re.match(version) if match is not None: version = version[match.end():] if _sha1_re.match(version): return version[:7] return version def add_dist(self, name, date_added=None): from sentry.models import Distribution if date_added is None: date_added = timezone.now() return Distribution.objects.get_or_create( release=self, name=name, defaults={ 'date_added': date_added, 'organization_id': self.organization_id, } )[0] def get_dist(self, name): from sentry.models import Distribution try: return Distribution.objects.get(name=name, release=self) except Distribution.DoesNotExist: pass def add_project(self, project): """ Add a project to this release. Returns True if the project was added and did not already exist. 
""" from sentry.models import Project try: with transaction.atomic(): ReleaseProject.objects.create(project=project, release=self) if not project.flags.has_releases: project.flags.has_releases = True project.update( flags=F('flags').bitor(Project.flags.has_releases), ) except IntegrityError: return False else: return True def set_refs(self, refs, user, fetch=False): from sentry.api.exceptions import InvalidRepository from sentry.models import Commit, ReleaseHeadCommit, Repository from sentry.tasks.commits import fetch_commits # TODO: this does the wrong thing unless you are on the most # recent release. Add a timestamp compare? prev_release = type(self).objects.filter( organization_id=self.organization_id, projects__in=self.projects.all(), ).exclude(version=self.version).order_by('-date_added').first() names = {r['repository'] for r in refs} repos = list( Repository.objects.filter( organization_id=self.organization_id, name__in=names, ) ) repos_by_name = {r.name: r for r in repos} invalid_repos = names - set(repos_by_name.keys()) if invalid_repos: raise InvalidRepository('Invalid repository names: %s' % ','.join(invalid_repos)) for ref in refs: repo = repos_by_name[ref['repository']] commit = Commit.objects.get_or_create( organization_id=self.organization_id, repository_id=repo.id, key=ref['commit'], )[0] # update head commit for repo/release if exists ReleaseHeadCommit.objects.create_or_update( organization_id=self.organization_id, repository_id=repo.id, release=self, values={ 'commit': commit, } ) if fetch: fetch_commits.apply_async( kwargs={ 'release_id': self.id, 'user_id': user.id, 'refs': refs, 'prev_release_id': prev_release and prev_release.id, } ) def set_commits(self, commit_list): """ Bind a list of commits to this release. These should be ordered from newest to oldest. This will clear any existing commit log and replace it with the given commits. 
""" from sentry.models import ( Commit, CommitAuthor, Group, GroupCommitResolution, GroupResolution, GroupStatus, ReleaseCommit, Repository ) from sentry.plugins.providers.repository import RepositoryProvider commit_list = [ c for c in commit_list if not RepositoryProvider.should_ignore_commit(c.get('message', '')) ] lock_key = type(self).get_lock_key(self.organization_id, self.id) lock = locks.get(lock_key, duration=10) with TimedRetryPolicy(10)(lock.acquire): with transaction.atomic(): # TODO(dcramer): would be good to optimize the logic to avoid these # deletes but not overly important ReleaseCommit.objects.filter( release=self, ).delete() authors = {} repos = {} commit_author_by_commit = {} latest_commit = None for idx, data in enumerate(commit_list): repo_name = data.get('repository' ) or 'organization-{}'.format(self.organization_id) if repo_name not in repos: repos[repo_name] = repo = Repository.objects.get_or_create( organization_id=self.organization_id, name=repo_name, )[0] else: repo = repos[repo_name] author_email = data.get('author_email') if author_email is None and data.get('author_name'): author_email = ( re.sub(r'[^a-zA-Z0-9\-_\.]*', '', data['author_name']).lower() + '@localhost' ) if not author_email: author = None elif author_email not in authors: authors[author_email] = author = CommitAuthor.objects.get_or_create( organization_id=self.organization_id, email=author_email, defaults={ 'name': data.get('author_name'), } )[0] if data.get('author_name') and author.name != data['author_name']: author.update(name=data['author_name']) else: author = authors[author_email] defaults = { 'message': data.get('message'), 'author': author, 'date_added': data.get('timestamp') or timezone.now(), } commit, created = Commit.objects.get_or_create( organization_id=self.organization_id, repository_id=repo.id, key=data['id'], defaults=defaults, ) if author is None: author = commit.author commit_author_by_commit[commit.id] = author patch_set = data.get('patch_set', []) 
for patched_file in patch_set: CommitFileChange.objects.get_or_create( organization_id=self.organization.id, commit=commit, filename=patched_file['path'], type=patched_file['type'], ) if not created: update_kwargs = {} if commit.message is None and defaults['message'] is not None: update_kwargs['message'] = defaults['message'] if commit.author_id is None and defaults['author'] is not None: update_kwargs['author'] = defaults['author'] if update_kwargs: commit.update(**update_kwargs) ReleaseCommit.objects.create( organization_id=self.organization_id, release=self, commit=commit, order=idx, ) if latest_commit is None: latest_commit = commit self.update( commit_count=len(commit_list), authors=[ six.text_type(a_id) for a_id in ReleaseCommit.objects.filter( release=self, commit__author_id__isnull=False, ).values_list('commit__author_id', flat=True).distinct() ], last_commit_id=latest_commit.id if latest_commit else None, ) commit_resolutions = list( GroupCommitResolution.objects.filter( commit_id__in=ReleaseCommit.objects.filter(release=self) .values_list('commit_id', flat=True), ).values_list('group_id', 'commit_id') ) user_by_author = {None: None} for group_id, commit_id in commit_resolutions: author = commit_author_by_commit.get(commit_id) if author not in user_by_author: try: user_by_author[author] = author.find_users()[0] except IndexError: user_by_author[author] = None actor = user_by_author[author] with transaction.atomic(): GroupResolution.objects.create_or_update( group_id=group_id, values={ 'release': self, 'type': GroupResolution.Type.in_release, 'status': GroupResolution.Status.resolved, 'actor_id': actor.id if actor else None, }, ) Group.objects.filter( id=group_id, ).update(status=GroupStatus.RESOLVED)
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey("sentry.Project")
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True
    )
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True
    )
    message = models.TextField()
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column="view"
    )
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (GroupStatus.UNRESOLVED, _("Unresolved")),
            (GroupStatus.RESOLVED, _("Resolved")),
            (GroupStatus.IGNORED, _("Ignored")),
        ),
        db_index=True,
    )
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey(
        "sentry.Release", null=True, on_delete=models.PROTECT
    )
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager(cache_fields=("id", ))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_groupedmessage"
        verbose_name_plural = _("grouped messages")
        verbose_name = _("grouped message")
        permissions = (("can_view", "Can view"), )
        index_together = [("project", "first_release"), ("project", "id")]
        unique_together = (("project", "short_id"), )

    __repr__ = sane_repr("project_id")

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill the timestamp trio so a new row is always fully populated.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        if self.times_seen is None:
            self.times_seen = 1
        self.score = type(self).calculate_score(
            times_seen=self.times_seen, last_seen=self.last_seen
        )
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self, params=None):
        # Built manually in preference to django.core.urlresolvers.reverse,
        # because reverse has a measured performance impact.
        query = "?" + urlencode(params) if params else ""
        url = u"organizations/{org}/issues/{id}/{params}".format(
            org=urlquote(self.organization.slug),
            id=self.id,
            params=query,
        )
        return absolute_uri(url)

    @property
    def qualified_short_id(self):
        """Human-readable id such as ``PROJECT-2A``, or None when unset."""
        if self.short_id is None:
            return None
        return "%s-%s" % (self.project.slug.upper(), base32_encode(self.short_id))

    def is_over_resolve_age(self):
        """True when the project auto-resolve window has elapsed."""
        resolve_age = self.project.get_option("sentry:resolve_age", None)
        if not resolve_age:
            return False
        cutoff = timezone.now() - timedelta(hours=int(resolve_age))
        return self.last_seen < cutoff

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    def is_unresolved(self):
        return self.get_status() == GroupStatus.UNRESOLVED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref is_muted
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Effective status: an IGNORED group with an expired snooze reads as
        UNRESOLVED, and an UNRESOLVED group past the resolve age as RESOLVED."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status
        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get_from_cache(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        from sentry.models import GroupShare

        shared_uuids = GroupShare.objects.filter(
            group_id=self.id
        ).values_list("uuid", flat=True)
        try:
            return shared_uuids[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    @classmethod
    def from_share_id(cls, share_id):
        """Resolve a 32-character share id back to its Group."""
        if not share_id or len(share_id) != 32:
            raise cls.DoesNotExist

        from sentry.models import GroupShare

        return cls.objects.get(
            id=GroupShare.objects.filter(uuid=share_id).values_list("group_id")
        )

    def get_score(self):
        return type(self).calculate_score(self.times_seen, self.last_seen)

    def get_latest_event(self):
        # Memoized on the instance.
        if not hasattr(self, "_latest_event"):
            self._latest_event = self.get_latest_event_for_environments()
        return self._latest_event

    def get_latest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.LATEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def get_oldest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.OLDEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def get_first_release(self):
        # Fall back to the tag store when the FK was never populated.
        if self.first_release_id is None:
            return tagstore.get_first_release(self.project_id, self.id)
        return self.first_release.version

    def get_last_release(self):
        return tagstore.get_last_release(self.project_id, self.id)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get("type", "default")

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data["metadata"]

    @property
    def title(self):
        event_type = eventtypes.get(self.get_event_type())()
        return event_type.get_title(self.get_event_metadata())

    def location(self):
        event_type = eventtypes.get(self.get_event_type())()
        return event_type.get_location(self.get_event_metadata())

    def error(self):
        warnings.warn("Group.error is deprecated, use Group.title",
                      DeprecationWarning)
        return self.title

    error.short_description = _("error")

    @property
    def message_short(self):
        warnings.warn("Group.message_short is deprecated, use Group.title",
                      DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def checksum(self):
        warnings.warn("Group.checksum is no longer used", DeprecationWarning)
        return ""

    def get_email_subject(self):
        return "%s - %s" % (
            self.qualified_short_id.encode("utf-8"),
            self.title.encode("utf-8"),
        )

    def count_users_seen(self):
        return tagstore.get_groups_user_counts(
            [self.project_id], [self.id],
            environment_ids=None,
            start=self.first_seen,
        )[self.id]

    @classmethod
    def calculate_score(cls, times_seen, last_seen):
        # Frequency (log-scaled) plus recency (epoch seconds).
        return math.log(float(times_seen or 1)) * 600 + float(last_seen.strftime("%s"))

    @staticmethod
    def issues_mapping(group_ids, project_ids, organization):
        """
        Create a dictionary of group_ids to their qualified_short_ids
        """
        matching = Group.objects.filter(
            id__in=group_ids,
            project_id__in=project_ids,
            project__organization=organization,
        )
        return {group.id: group.qualified_short_id for group in matching}
class Organization(Model):
    """
    An organization represents a group of individuals which maintain ownership of projects.
    """
    __core__ = True

    name = models.CharField(max_length=64)
    slug = models.SlugField(unique=True)
    status = BoundedPositiveIntegerField(choices=(
        (OrganizationStatus.VISIBLE, _('Visible')),
        (OrganizationStatus.PENDING_DELETION, _('Pending Deletion')),
        (OrganizationStatus.DELETION_IN_PROGRESS, _('Deletion in Progress')),
    ), default=OrganizationStatus.VISIBLE)
    date_added = models.DateTimeField(default=timezone.now)
    members = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        through='sentry.OrganizationMember',
        related_name='org_memberships',
    )
    default_role = models.CharField(
        choices=roles.get_choices(),
        max_length=32,
        default=roles.get_default().id,
    )

    flags = BitField(flags=(
        ('allow_joinleave',
         'Allow members to join and leave teams without requiring approval.'),
        ('enhanced_privacy',
         'Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.'
         ),
        ('disable_shared_issues',
         'Disable sharing of limited details on issues to anonymous users.'),
        ('early_adopter',
         'Enable early adopter status, gaining access to features prior to public release.'
         ),
    ), default=1)

    objects = OrganizationManager(cache_fields=(
        'pk',
        'slug',
    ))

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organization'

    __repr__ = sane_repr('owner_id', 'name', 'slug')

    @classmethod
    def get_default(cls):
        """
        Return the organization used in single organization mode.
        """
        return cls.objects.filter(status=OrganizationStatus.VISIBLE, )[0]

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def save(self, *args, **kwargs):
        if not self.slug:
            # Generate a unique slug under a lock so concurrent saves cannot
            # race to the same value.
            lock = locks.get('slug:organization', duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, reserved=RESERVED_ORGANIZATION_SLUGS)
            super(Organization, self).save(*args, **kwargs)
        else:
            super(Organization, self).save(*args, **kwargs)

    def delete(self):
        if self.is_default:
            # BUG FIX: message previously read "delete the the default".
            raise Exception('You cannot delete the default organization.')
        return super(Organization, self).delete()

    @cached_property
    def is_default(self):
        """True only in single-organization mode, for the default org."""
        if not settings.SENTRY_SINGLE_ORGANIZATION:
            return False
        return self == type(self).get_default()

    def has_access(self, user, access=None):
        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)
        return queryset.exists()

    def get_audit_log_data(self):
        return {
            'id': self.id,
            'slug': self.slug,
            'name': self.name,
            'status': self.status,
            'flags': self.flags,
            'default_role': self.default_role,
        }

    def get_owners(self):
        from sentry.models import User
        return User.objects.filter(
            sentry_orgmember_set__role=roles.get_top_dog().id,
            sentry_orgmember_set__organization=self,
            is_active=True,
        )

    def get_default_owner(self):
        # Memoized on the instance.
        if not hasattr(self, '_default_owner'):
            self._default_owner = self.get_owners()[0]
        return self._default_owner

    def has_single_owner(self):
        from sentry.models import OrganizationMember
        # Slice to 2 rows: we only need to distinguish "one" from "more".
        count = OrganizationMember.objects.filter(
            organization=self,
            role=roles.get_top_dog().id,
            user__isnull=False,
            user__is_active=True,
        )[:2].count()
        return count == 1

    def merge_to(from_org, to_org):
        """Move members, teams, projects, releases and related rows from
        ``from_org`` into ``to_org`` (note: bound as an instance method, so
        ``from_org`` plays the role of ``self``)."""
        from sentry.models import (
            ApiKey, AuditLogEntry, Commit, OrganizationMember,
            OrganizationMemberTeam, Project, Release, ReleaseCommit,
            ReleaseEnvironment, ReleaseFile, Repository, Team, Environment,
        )

        for from_member in OrganizationMember.objects.filter(
                organization=from_org, user__isnull=False):
            try:
                to_member = OrganizationMember.objects.get(
                    organization=to_org,
                    user=from_member.user,
                )
            except OrganizationMember.DoesNotExist:
                from_member.update(organization=to_org)
                to_member = from_member
            else:
                # Member exists on both sides; merge team memberships instead.
                qs = OrganizationMemberTeam.objects.filter(
                    organizationmember=from_member,
                    is_active=True,
                ).select_related()
                for omt in qs:
                    OrganizationMemberTeam.objects.create_or_update(
                        organizationmember=to_member,
                        team=omt.team,
                        defaults={
                            'is_active': True,
                        },
                    )

        # Teams and projects may collide on slug; re-slugify on conflict.
        for team in Team.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    team.update(organization=to_org)
            except IntegrityError:
                slugify_instance(team, team.name, organization=to_org)
                team.update(
                    organization=to_org,
                    slug=team.slug,
                )

        for project in Project.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    project.update(organization=to_org)
            except IntegrityError:
                slugify_instance(project, project.name, organization=to_org)
                project.update(
                    organization=to_org,
                    slug=project.slug,
                )

        # TODO(jess): update this when adding unique constraint
        # on version, organization for releases
        for release in Release.objects.filter(organization=from_org):
            try:
                to_release = Release.objects.get(
                    version=release.version, organization=to_org)
            except Release.DoesNotExist:
                Release.objects.filter(id=release.id).update(organization=to_org)
            else:
                Release.merge(to_release, [release])

        for model in (ApiKey, AuditLogEntry, ReleaseFile):
            model.objects.filter(
                organization=from_org,
            ).update(organization=to_org)

        for model in (Commit, ReleaseCommit, ReleaseEnvironment, Repository, Environment):
            model.objects.filter(
                organization_id=from_org.id,
            ).update(organization_id=to_org.id)

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption
        return OrganizationOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption
        return OrganizationOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption
        return OrganizationOption.objects.unset_value(self, *args, **kwargs)

    def send_delete_confirmation(self, audit_log_entry, countdown):
        """Email every owner that the organization is queued for deletion."""
        from sentry import options
        from sentry.utils.email import MessageBuilder

        owners = self.get_owners()
        context = {
            'organization': self,
            'audit_log_entry': audit_log_entry,
            'eta': timezone.now() + timedelta(seconds=countdown),
            'url': absolute_uri(
                reverse(
                    'sentry-restore-organization',
                    args=[self.slug],
                )
            ),
        }

        MessageBuilder(
            subject='%sOrganization Queued for Deletion' %
            (options.get('mail.subject-prefix'), ),
            template='sentry/emails/org_delete_confirm.txt',
            html_template='sentry/emails/org_delete_confirm.html',
            type='org.confirm_delete',
            context=context,
        ).send_async([o.email for o in owners])
class AuditLogEntry(Model):
    """Immutable record of a mutating action performed inside an organization,
    attributed to either a user (``actor``) or an API key (``actor_key``)."""
    __core__ = False

    organization = FlexibleForeignKey('sentry.Organization')
    # Snapshot of the actor's name, kept even if the actor row is deleted.
    actor_label = models.CharField(max_length=64, null=True, blank=True)
    # if the entry was created via a user
    actor = FlexibleForeignKey(
        'sentry.User', related_name='audit_actors', null=True, blank=True)
    # if the entry was created via an api key
    actor_key = FlexibleForeignKey('sentry.ApiKey', null=True, blank=True)
    target_object = BoundedPositiveIntegerField(null=True)
    target_user = FlexibleForeignKey(
        'sentry.User', null=True, blank=True, related_name='audit_targets'
    )
    # TODO(dcramer): we want to compile this mapping into JSX for the UI
    event = BoundedPositiveIntegerField(
        choices=(
            # We emulate github a bit with event naming
            (AuditLogEntryEvent.MEMBER_INVITE, 'member.invite'),
            (AuditLogEntryEvent.MEMBER_ADD, 'member.add'),
            (AuditLogEntryEvent.MEMBER_ACCEPT, 'member.accept-invite'),
            (AuditLogEntryEvent.MEMBER_REMOVE, 'member.remove'),
            (AuditLogEntryEvent.MEMBER_EDIT, 'member.edit'),
            (AuditLogEntryEvent.MEMBER_JOIN_TEAM, 'member.join-team'),
            (AuditLogEntryEvent.MEMBER_LEAVE_TEAM, 'member.leave-team'),
            (AuditLogEntryEvent.TEAM_ADD, 'team.create'),
            (AuditLogEntryEvent.TEAM_EDIT, 'team.edit'),
            (AuditLogEntryEvent.TEAM_REMOVE, 'team.remove'),
            (AuditLogEntryEvent.PROJECT_ADD, 'project.create'),
            (AuditLogEntryEvent.PROJECT_EDIT, 'project.edit'),
            (AuditLogEntryEvent.PROJECT_REMOVE, 'project.remove'),
            (AuditLogEntryEvent.PROJECT_SET_PUBLIC, 'project.set-public'),
            (AuditLogEntryEvent.PROJECT_SET_PRIVATE, 'project.set-private'),
            (AuditLogEntryEvent.PROJECT_REQUEST_TRANSFER, 'project.request-transfer'),
            (AuditLogEntryEvent.PROJECT_ACCEPT_TRANSFER, 'project.accept-transfer'),
            (AuditLogEntryEvent.ORG_ADD, 'org.create'),
            (AuditLogEntryEvent.ORG_EDIT, 'org.edit'),
            (AuditLogEntryEvent.ORG_REMOVE, 'org.remove'),
            (AuditLogEntryEvent.ORG_RESTORE, 'org.restore'),
            (AuditLogEntryEvent.TAGKEY_REMOVE, 'tagkey.remove'),
            (AuditLogEntryEvent.PROJECTKEY_ADD, 'projectkey.create'),
            (AuditLogEntryEvent.PROJECTKEY_EDIT, 'projectkey.edit'),
            (AuditLogEntryEvent.PROJECTKEY_REMOVE, 'projectkey.remove'),
            (AuditLogEntryEvent.PROJECTKEY_ENABLE, 'projectkey.enable'),
            (AuditLogEntryEvent.PROJECTKEY_DISABLE, 'projectkey.disable'),
            (AuditLogEntryEvent.SSO_ENABLE, 'sso.enable'),
            (AuditLogEntryEvent.SSO_DISABLE, 'sso.disable'),
            (AuditLogEntryEvent.SSO_EDIT, 'sso.edit'),
            (AuditLogEntryEvent.SSO_IDENTITY_LINK, 'sso-identity.link'),
            (AuditLogEntryEvent.APIKEY_ADD, 'api-key.create'),
            (AuditLogEntryEvent.APIKEY_EDIT, 'api-key.edit'),
            (AuditLogEntryEvent.APIKEY_REMOVE, 'api-key.remove'),
            (AuditLogEntryEvent.RULE_ADD, 'rule.create'),
            (AuditLogEntryEvent.RULE_EDIT, 'rule.edit'),
            (AuditLogEntryEvent.RULE_REMOVE, 'rule.remove'),
            (AuditLogEntryEvent.SET_ONDEMAND, 'ondemand.edit'),
            # BUG FIX: these five display labels were misspelled 'serivcehook'.
            (AuditLogEntryEvent.SERVICEHOOK_ADD, 'servicehook.create'),
            (AuditLogEntryEvent.SERVICEHOOK_EDIT, 'servicehook.edit'),
            (AuditLogEntryEvent.SERVICEHOOK_REMOVE, 'servicehook.remove'),
            (AuditLogEntryEvent.SERVICEHOOK_ENABLE, 'servicehook.enable'),
            (AuditLogEntryEvent.SERVICEHOOK_DISABLE, 'servicehook.disable'),
        )
    )
    ip_address = models.GenericIPAddressField(null=True, unpack_ipv4=True)
    data = GzippedDictField()
    datetime = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_auditlogentry'

    __repr__ = sane_repr('organization_id', 'type')

    def save(self, *args, **kwargs):
        # Snapshot the actor's label so the log entry survives actor deletion.
        if not self.actor_label:
            assert self.actor or self.actor_key
            if self.actor:
                self.actor_label = self.actor.username
            else:
                self.actor_label = self.actor_key.key
        super(AuditLogEntry, self).save(*args, **kwargs)

    def get_actor_name(self):
        if self.actor:
            return self.actor.get_display_name()
        elif self.actor_key:
            return self.actor_key.key + ' (api key)'
        return self.actor_label

    def get_note(self):
        """Render a human-readable description of this entry from its event
        type and ``data`` payload; returns '' for unknown events."""
        if self.event == AuditLogEntryEvent.MEMBER_INVITE:
            return 'invited member %s' % (self.data['email'], )
        elif self.event == AuditLogEntryEvent.MEMBER_ADD:
            if self.target_user == self.actor:
                return 'joined the organization'
            return 'added member %s' % (self.target_user.get_display_name(), )
        elif self.event == AuditLogEntryEvent.MEMBER_ACCEPT:
            return 'accepted the membership invite'
        elif self.event == AuditLogEntryEvent.MEMBER_REMOVE:
            if self.target_user == self.actor:
                return 'left the organization'
            return 'removed member %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
            )
        elif self.event == AuditLogEntryEvent.MEMBER_EDIT:
            return 'edited member %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
            )
        elif self.event == AuditLogEntryEvent.MEMBER_JOIN_TEAM:
            if self.target_user == self.actor:
                return 'joined team %s' % (self.data['team_slug'], )
            return 'added %s to team %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
                self.data['team_slug'],
            )
        elif self.event == AuditLogEntryEvent.MEMBER_LEAVE_TEAM:
            if self.target_user == self.actor:
                return 'left team %s' % (self.data['team_slug'], )
            return 'removed %s from team %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
                self.data['team_slug'],
            )
        elif self.event == AuditLogEntryEvent.ORG_ADD:
            return 'created the organization'
        elif self.event == AuditLogEntryEvent.ORG_EDIT:
            return 'edited the organization setting(s): ' + (
                ', '.join(u'{} {}'.format(k, v) for k, v in self.data.items()))
        elif self.event == AuditLogEntryEvent.ORG_REMOVE:
            return 'removed the organization'
        elif self.event == AuditLogEntryEvent.ORG_RESTORE:
            return 'restored the organization'
        elif self.event == AuditLogEntryEvent.TEAM_ADD:
            return 'created team %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.TEAM_EDIT:
            return 'edited team %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.TEAM_REMOVE:
            return 'removed team %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_ADD:
            return 'created project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_EDIT:
            return 'edited project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_REMOVE:
            return 'removed project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_REQUEST_TRANSFER:
            return 'requested to transfer project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_ACCEPT_TRANSFER:
            return 'accepted transfer of project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.TAGKEY_REMOVE:
            return 'removed tags matching %s = *' % (self.data['key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_ADD:
            return 'added project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_EDIT:
            return 'edited project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_REMOVE:
            return 'removed project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_ENABLE:
            return 'enabled project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_DISABLE:
            return 'disabled project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.SSO_ENABLE:
            return 'enabled sso (%s)' % (self.data['provider'], )
        elif self.event == AuditLogEntryEvent.SSO_DISABLE:
            return 'disabled sso (%s)' % (self.data['provider'], )
        elif self.event == AuditLogEntryEvent.SSO_EDIT:
            return 'edited sso settings'
        elif self.event == AuditLogEntryEvent.SSO_IDENTITY_LINK:
            return 'linked their account to a new identity'
        elif self.event == AuditLogEntryEvent.APIKEY_ADD:
            return 'added api key %s' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.APIKEY_EDIT:
            return 'edited api key %s' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.APIKEY_REMOVE:
            return 'removed api key %s' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.RULE_ADD:
            return 'added rule "%s"' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.RULE_EDIT:
            return 'edited rule "%s"' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.RULE_REMOVE:
            return 'removed rule "%s"' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.SET_ONDEMAND:
            return 'changed on-demand max spend to $%d' % (self.data['ondemand'] / 100, )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_ADD:
            return 'added a service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_EDIT:
            return 'edited the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_REMOVE:
            return 'removed the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_ENABLE:
            # BUG FIX: message previously read 'enabled theservice hook'.
            return 'enabled the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_DISABLE:
            return 'disabled the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        return ''