class TimeSeriesSnapshot(Model):
    """Stores a snapshot of (time, count) time series data over a period."""

    __core__ = True

    start = models.DateTimeField()
    end = models.DateTimeField()
    # Nested integer arrays; each inner array is a (time, count) pair, as
    # consumed by snuba_values below.
    values = ArrayField(of=ArrayField(models.IntegerField()))
    period = models.IntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_timeseriessnapshot"

    @property
    def snuba_values(self):
        """
        Returns values matching the snuba format, a list of dicts with
        'time' and 'count' keys.
        :return:
        """
        return {
            "data": [{
                "time": time,
                "count": count
            } for time, count in self.values]
        }
class TimeSeriesSnapshot(Model):
    """Stores a snapshot of (time, count) time series data over a period."""

    __include_in_export__ = True

    start = models.DateTimeField()
    end = models.DateTimeField()
    values = ArrayField(of=ArrayField(models.FloatField()))
    period = models.IntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_timeseriessnapshot"

    @property
    def snuba_values(self):
        """
        Returns values matching the snuba format, a list of dicts with
        'time' and 'count' keys.
        :return:
        """
        # Values are stored as floats so percentage stats can be represented.
        # Times are always returned as ints, and counts that are whole numbers
        # are cast back to int to stay consistent with what Snuba returns.
        data = []
        for time, count in self.values:
            if count is not None and count.is_integer():
                count = int(count)
            data.append({"time": int(time), "count": count})
        return {"data": data}
class TimeSeriesSnapshot(Model):
    """Snapshot of integer time series values over a fixed period."""

    __core__ = True

    start = models.DateTimeField()
    end = models.DateTimeField()
    # Nested integer arrays; presumably (time, count) pairs — confirm with callers.
    values = ArrayField(of=ArrayField(models.IntegerField()))
    period = models.IntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_timeseriessnapshot'
class TimeSeriesSnapshot(Model):
    """Snapshot of float time series values over a fixed period."""

    __include_in_export__ = True

    start = models.DateTimeField()
    end = models.DateTimeField()
    # Stored as floats, so fractional values are representable.
    values = ArrayField(of=ArrayField(models.FloatField()))
    period = models.IntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_timeseriessnapshot"
class AlertRule(Model):
    """An alert rule for a project, thresholded over a query subscription."""

    __core__ = True

    objects = AlertRuleManager()
    # Includes rows regardless of status (AlertRuleManager presumably filters
    # deleted rows — confirm against its definition).
    objects_with_deleted = BaseManager()

    project = FlexibleForeignKey("sentry.Project", db_index=False, db_constraint=False)
    query_subscription = FlexibleForeignKey("sentry.QuerySubscription", unique=True, null=True)
    name = models.TextField()
    status = models.SmallIntegerField(default=AlertRuleStatus.PENDING.value)
    threshold_type = models.SmallIntegerField()
    alert_threshold = models.IntegerField()
    resolve_threshold = models.IntegerField()
    threshold_period = models.IntegerField()
    date_modified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    # These will be removed after we've made these columns nullable. Moving to
    # QuerySubscription
    subscription_id = models.UUIDField(db_index=True, null=True)
    dataset = models.TextField(null=True)
    query = models.TextField(null=True)
    aggregations = ArrayField(of=models.IntegerField, null=True)
    time_window = models.IntegerField(null=True)
    resolution = models.IntegerField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_alertrule"
        unique_together = (("project", "name"),)
class PluginHealth(Model):
    """Metadata and health status for a plugin."""

    __core__ = True

    name = models.CharField(max_length=128, db_index=True)
    features_list = ArrayField(of=models.TextField)
    date_added = models.DateTimeField(default=timezone.now)
    link = models.URLField(null=True, blank=True)
    author = models.CharField(max_length=64)
    metadata = JSONField()
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ObjectStatus.VISIBLE, _('Active')),
            (ObjectStatus.PENDING_DELETION, _('Pending Deletion')),
            (ObjectStatus.DELETION_IN_PROGRESS, _('Deletion in Progress')),
        ),
        db_index=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_pluginhealth'

    __repr__ = sane_repr('name')

    def run_tests(self):
        """Create a PluginHealthTest for this plugin, run it, persist and return it."""
        # NOTE(review): presumably PluginHealthTest.run_tests(self) executes the
        # plugin's health checks and returns result data — confirm against
        # PluginHealthTest's definition.
        plugin_test = PluginHealthTest.objects.create(plugin_id=self.id, )
        plugin_test.test_data = plugin_test.run_tests(self)
        plugin_test.save()
        return plugin_test
class Identity(Model):
    """
    A verified link between a user and a third party identity.
    """

    __include_in_export__ = False

    idp = FlexibleForeignKey("sentry.IdentityProvider")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL)
    external_id = models.TextField()
    data = EncryptedJsonField()
    status = BoundedPositiveIntegerField(default=IdentityStatus.UNKNOWN)
    scopes = ArrayField()
    date_verified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    objects = IdentityManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_identity"
        # An external id is unique per provider, and a user may hold at most
        # one identity per provider.
        unique_together = (("idp", "external_id"), ("idp", "user"))

    def get_provider(self):
        # Resolve the provider implementation for this identity's idp type.
        # Local import — presumably avoids a circular import; confirm.
        from sentry.identity import get

        return get(self.idp.type)
class ServiceHook(Model):
    """A webhook registration that delivers selected events to an external URL."""

    __include_in_export__ = True

    guid = models.CharField(max_length=32, unique=True, null=True)
    # hooks may be bound to an api application, or simply registered by a user
    application = FlexibleForeignKey("sentry.ApiApplication", null=True)
    actor_id = BoundedPositiveIntegerField(db_index=True)
    project_id = BoundedPositiveIntegerField(db_index=True, null=True)
    organization_id = BoundedPositiveIntegerField(db_index=True, null=True)
    url = models.URLField(max_length=512)
    secret = EncryptedTextField(default=generate_secret)
    events = ArrayField(of=models.TextField)
    status = BoundedPositiveIntegerField(default=0, choices=ObjectStatus.as_choices(), db_index=True)
    version = BoundedPositiveIntegerField(default=0, choices=((0, "0"), ))
    date_added = models.DateTimeField(default=timezone.now)

    objects = BaseManager(cache_fields=("guid", ))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_servicehook"

    __repr__ = sane_repr("guid", "project_id")

    @property
    def created_by_sentry_app(self):
        # Truthy only when bound to an application that maps to a SentryApp.
        return self.application_id and self.sentry_app

    @property
    def sentry_app(self):
        """Return the SentryApp behind this hook's application, or None."""
        try:
            return SentryApp.objects.get(application_id=self.application_id)
        except SentryApp.DoesNotExist:
            return None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Assign a guid eagerly for rows that don't have one yet.
        if self.guid is None:
            self.guid = uuid4().hex

    def __str__(self):
        return str(self.guid)

    def build_signature(self, body):
        """Return the hex HMAC-SHA256 of `body`, keyed by this hook's secret."""
        mac = hmac.new(
            key=self.secret.encode("utf-8"),
            msg=body.encode("utf-8"),
            digestmod=sha256,
        )
        return mac.hexdigest()

    def get_audit_log_data(self):
        return {"url": self.url}

    def add_project(self, project):
        """
        Add a project to the service hook.
        """
        ServiceHookProject.objects.create(project_id=project.id, service_hook_id=self.id)
class UserRole(DefaultFieldsModel):
    """
    Roles are applied to administrative users and apply a set of `UserPermission`.
    """

    __include_in_export__ = True

    name = models.CharField(max_length=32, unique=True)
    permissions = ArrayField()
    users = models.ManyToManyField("sentry.User", through="sentry.UserRoleUser")

    class Meta:
        app_label = "sentry"
        db_table = "sentry_userrole"

    __repr__ = sane_repr("name", "permissions")

    @classmethod
    def permissions_for_user(cls, user_id: int) -> FrozenSet[str]:
        """
        Return a set of permission for the given user ID scoped to roles.
        """
        # Union the permission lists of every role this user belongs to.
        granted = set()
        permission_lists = cls.objects.filter(users=user_id).values_list(
            "permissions", flat=True)
        for permission_list in permission_lists:
            granted.update(permission_list)
        return frozenset(granted)
class AlertRule(Model):
    """An alert rule for a project; query/subscription columns live on the rule."""

    __core__ = True

    objects = AlertRuleManager()
    # Includes rows regardless of status (AlertRuleManager presumably filters
    # deleted rows — confirm against its definition).
    objects_with_deleted = BaseManager()

    project = FlexibleForeignKey('sentry.Project', db_index=False, db_constraint=False)
    name = models.TextField()
    status = models.SmallIntegerField(default=AlertRuleStatus.PENDING.value)
    subscription_id = models.UUIDField(db_index=True)
    threshold_type = models.SmallIntegerField()
    dataset = models.TextField()
    query = models.TextField()
    aggregations = ArrayField(of=models.IntegerField)
    time_window = models.IntegerField()
    resolution = models.IntegerField()
    alert_threshold = models.IntegerField()
    resolve_threshold = models.IntegerField()
    threshold_period = models.IntegerField()
    date_modified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_alertrule'
        unique_together = (('project', 'name'), )
class ApiAuthorization(Model):
    """
    Tracks which scopes a user has authorized for a given application.

    This is used to determine when we need re-prompt a user, as well as
    track overall approved applications (vs individual tokens).
    """

    __core__ = True

    # users can generate tokens without being application-bound
    application = FlexibleForeignKey('sentry.ApiApplication', null=True)
    user = FlexibleForeignKey('sentry.User')
    scopes = BitField(flags=ApiScopes().to_bitfield())
    scope_list = ArrayField(of=models.TextField)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_apiauthorization'
        unique_together = (('user', 'application'), )

    __repr__ = sane_repr('user_id', 'application_id')

    def get_scopes(self):
        """Return granted scope names, preferring scope_list over the bitfield."""
        if not self.scope_list:
            # Legacy path: decode set bits into their scope names.
            return [k for k, v in six.iteritems(self.scopes) if v]
        return self.scope_list

    def has_scope(self, scope):
        """True if `scope` is among this authorization's granted scopes."""
        return scope in self.get_scopes()
class DashboardWidgetQuery(Model):
    """
    A query in a dashboard widget.
    """

    __include_in_export__ = True

    widget = FlexibleForeignKey("sentry.DashboardWidget")
    name = models.CharField(max_length=255)
    fields = ArrayField()
    conditions = models.TextField()
    # aggregates and columns will eventually replace fields.
    # Using django's built-in array field here since the one
    # from sentry/db/model/fields.py adds a default value to the
    # database migration.
    aggregates = DjangoArrayField(models.TextField(), null=True)
    columns = DjangoArrayField(models.TextField(), null=True)
    # Orderby condition for the query
    orderby = models.TextField(default="")
    # Order of the widget query in the widget.
    order = BoundedPositiveIntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_dashboardwidgetquery"
        unique_together = (("widget", "order"), )

    # NOTE(review): "type" is not a field declared on this model — confirm
    # sane_repr tolerates missing attributes.
    __repr__ = sane_repr("widget", "type", "name")
class ApiGrant(Model):
    """
    A grant represents a token with a short lifetime that can
    be swapped for an access token, as described in :rfc:`4.1.2`
    of the OAuth 2 spec.
    """

    __core__ = False

    user = FlexibleForeignKey('sentry.User')
    application = FlexibleForeignKey('sentry.ApiApplication')
    # NOTE(review): callable lambda defaults are not serializable by Django's
    # migration framework — confirm the migrations for these fields.
    code = models.CharField(
        max_length=64, db_index=True,
        default=lambda: ApiGrant.generate_code())
    expires_at = models.DateTimeField(
        db_index=True,
        default=lambda: timezone.now() + DEFAULT_EXPIRATION)
    redirect_uri = models.CharField(max_length=255)
    # Bit flags, one per grantable scope.
    scopes = BitField(flags=(
        ('project:read', 'project:read'),
        ('project:write', 'project:write'),
        ('project:admin', 'project:admin'),
        ('project:releases', 'project:releases'),
        ('team:read', 'team:read'),
        ('team:write', 'team:write'),
        ('team:admin', 'team:admin'),
        ('event:read', 'event:read'),
        ('event:write', 'event:write'),
        ('event:admin', 'event:admin'),
        ('org:read', 'org:read'),
        ('org:write', 'org:write'),
        ('org:admin', 'org:admin'),
        ('member:read', 'member:read'),
        ('member:write', 'member:write'),
        ('member:admin', 'member:admin'),
    ))
    scope_list = ArrayField(of=models.TextField)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_apigrant'

    @classmethod
    def generate_code(cls):
        # Random hex token used as the grant code.
        return uuid4().hex

    def get_scopes(self):
        # scope_list (text array) supersedes the legacy bitfield when set.
        if self.scope_list:
            return self.scope_list
        return [k for k, v in six.iteritems(self.scopes) if v]

    def has_scope(self, scope):
        return scope in self.get_scopes()

    def is_expired(self):
        return timezone.now() >= self.expires_at

    def redirect_uri_allowed(self, uri):
        # Exact-match check against the registered redirect URI.
        return uri == self.redirect_uri
class Identity(Model):
    """
    A verified link between a user and a third party identity.
    """

    __core__ = False

    idp = FlexibleForeignKey("sentry.IdentityProvider")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL)
    external_id = models.TextField()
    data = EncryptedJsonField()
    status = BoundedPositiveIntegerField(default=IdentityStatus.UNKNOWN)
    scopes = ArrayField()
    date_verified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_identity"
        unique_together = (("idp", "external_id"), ("idp", "user"))

    def get_provider(self):
        # Resolve the provider implementation for this identity's idp type.
        # Local import — presumably avoids a circular import; confirm.
        from sentry.identity import get

        return get(self.idp.type)

    @classmethod
    def reattach(cls, idp, external_id, user, defaults):
        """
        Removes identities under `idp` associated with either `external_id`
        or `user` and creates a new identity linking them.
        """
        # Delete any rows that would collide with the new link under either
        # unique_together constraint: (idp, external_id) or (idp, user).
        lookup = Q(external_id=external_id) | Q(user=user)
        Identity.objects.filter(lookup, idp=idp).delete()
        logger.info(
            "deleted-identity",
            extra={
                "external_id": external_id,
                "idp_id": idp.id,
                "user_id": user.id
            },
        )
        identity_model = Identity.objects.create(
            idp=idp, user=user, external_id=external_id, **defaults)
        logger.info(
            "created-identity",
            extra={
                "idp_id": idp.id,
                "external_id": external_id,
                "object_id": identity_model.id,
                "user_id": user.id,
            },
        )
        return identity_model
class ApiGrant(Model):
    """
    A grant represents a token with a short lifetime that can
    be swapped for an access token, as described in :rfc:`4.1.2`
    of the OAuth 2 spec.
    """

    __core__ = False

    user = FlexibleForeignKey("sentry.User")
    application = FlexibleForeignKey("sentry.ApiApplication")
    code = models.CharField(max_length=64, db_index=True, default=generate_code)
    expires_at = models.DateTimeField(db_index=True, default=default_expiration)
    redirect_uri = models.CharField(max_length=255)
    scopes = BitField(flags=(
        ("project:read", "project:read"),
        ("project:write", "project:write"),
        ("project:admin", "project:admin"),
        ("project:releases", "project:releases"),
        ("team:read", "team:read"),
        ("team:write", "team:write"),
        ("team:admin", "team:admin"),
        ("event:read", "event:read"),
        ("event:write", "event:write"),
        ("event:admin", "event:admin"),
        ("org:read", "org:read"),
        ("org:write", "org:write"),
        ("org:admin", "org:admin"),
        ("member:read", "member:read"),
        ("member:write", "member:write"),
        ("member:admin", "member:admin"),
    ))
    scope_list = ArrayField(of=models.TextField)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_apigrant"

    def get_scopes(self):
        """Return granted scope names, preferring scope_list over the bitfield."""
        if not self.scope_list:
            # Legacy path: decode set bits into their scope names.
            return [name for name, enabled in self.scopes.items() if enabled]
        return self.scope_list

    def has_scope(self, scope):
        """True if `scope` is among this grant's scopes."""
        return scope in self.get_scopes()

    def is_expired(self):
        """True once the current time has reached expires_at."""
        return self.expires_at <= timezone.now()

    def redirect_uri_allowed(self, uri):
        """Exact-match check against the registered redirect URI."""
        return uri == self.redirect_uri
class ServiceHook(Model):
    """A webhook registration that delivers selected events to an external URL."""

    __core__ = True

    guid = models.CharField(max_length=32, unique=True, null=True)
    # hooks may be bound to an api application, or simply registered by a user
    application = FlexibleForeignKey('sentry.ApiApplication', null=True)
    actor_id = BoundedPositiveIntegerField(db_index=True)
    project_id = BoundedPositiveIntegerField(db_index=True)
    organization_id = BoundedPositiveIntegerField(db_index=True, null=True)
    url = models.URLField(max_length=512)
    secret = EncryptedTextField(default=generate_secret)
    events = ArrayField(of=models.TextField)
    status = BoundedPositiveIntegerField(default=0, choices=ObjectStatus.as_choices(), db_index=True)
    version = BoundedPositiveIntegerField(default=0, choices=((0, '0'), ))
    date_added = models.DateTimeField(default=timezone.now)

    objects = BaseManager(cache_fields=('guid', ))

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_servicehook'

    __repr__ = sane_repr('guid', 'project_id')

    @property
    def created_by_sentry_app(self):
        # Truthy only when bound to an application that maps to a SentryApp.
        return (self.application_id and self.sentry_app)

    @property
    def sentry_app(self):
        # Look up the SentryApp behind this hook's application; None if absent.
        try:
            return SentryApp.objects.get(application_id=self.application_id)
        except SentryApp.DoesNotExist:
            return

    def __init__(self, *args, **kwargs):
        super(ServiceHook, self).__init__(*args, **kwargs)
        # Assign a guid eagerly for rows that don't have one yet.
        if self.guid is None:
            self.guid = uuid4().hex

    def __unicode__(self):
        return six.text_type(self.guid)

    def build_signature(self, body):
        # Hex HMAC-SHA256 of the payload body, keyed by this hook's secret.
        return hmac.new(
            key=self.secret.encode('utf-8'),
            msg=body.encode('utf-8'),
            digestmod=sha256,
        ).hexdigest()

    def get_audit_log_data(self):
        return {'url': self.url}
class QuerySubscription(Model):
    """A stored query subscription for a project (dataset, query, window, resolution)."""

    __core__ = True

    project = FlexibleForeignKey("sentry.Project", db_constraint=False)
    type = models.TextField()
    # External subscription identifier; unique across all subscriptions.
    subscription_id = models.TextField(unique=True)
    dataset = models.TextField()
    query = models.TextField()
    aggregations = ArrayField(of=models.IntegerField)
    time_window = models.IntegerField()
    resolution = models.IntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_querysubscription"
class SentryApp(ParanoidModel):
    """An integration application ("Sentry App") owned by a Sentry user."""

    __core__ = True

    application = models.OneToOneField('sentry.ApiApplication', related_name='sentry_app')

    # Much of the OAuth system in place currently depends on a User existing.
    # This "proxy user" represents the SentryApp in those cases.
    proxy_user = models.OneToOneField('sentry.User', related_name='sentry_app')

    # The owner is an actual Sentry User who created the SentryApp. Used to
    # determine who can manage the SentryApp itself.
    owner = FlexibleForeignKey('sentry.User', related_name='owned_sentry_apps')

    # The set of OAuth scopes necessary for this integration to function.
    scopes = BitField(flags=ApiScopes().to_bitfield())
    scope_list = ArrayField(of=models.TextField())

    name = models.TextField()
    slug = models.CharField(max_length=64, unique=True)
    # NOTE(review): lambda defaults are not serializable by Django's migration
    # framework, and six.binary_type(uuid.uuid4()) yields bytes of the UUID
    # object on Python 3 rather than its hex string — confirm intent.
    uuid = models.CharField(max_length=64,
                            default=lambda: six.binary_type(uuid.uuid4()))
    webhook_url = models.TextField()
    date_added = models.DateTimeField(default=timezone.now)
    date_updated = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_sentryapp'

    def save(self, *args, **kwargs):
        # Ensure a slug exists before persisting.
        self._set_slug()
        return super(SentryApp, self).save(*args, **kwargs)

    def _set_slug(self):
        """
        Derive ``slug`` from ``name`` (lowercase, dash form) if not already set.

        >>> self.name = 'My Cool App'
        >>> self._set_slug()
        >>> self.slug
        'my-cool-app'
        """
        if not self.slug:
            self.slug = slugify(self.name)
class Identity(Model):
    """
    A unique identity with an external provider (e.g. GitHub).
    """

    __core__ = False

    idp = FlexibleForeignKey('sentry.IdentityProvider')
    external_id = models.CharField(max_length=64)
    data = EncryptedJsonField()
    status = BoundedPositiveIntegerField(default=IdentityStatus.UNKNOWN, )
    scopes = ArrayField()
    date_verified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_identity'
        # An external id is unique per provider.
        unique_together = (('idp', 'external_id'), )
def filter_by_semver(
    self,
    organization_id: int,
    semver_filter: SemverFilter,
    project_ids: Optional[Sequence[int]] = None,
) -> models.QuerySet:
    """
    Filters releases based on a provided `SemverFilter` instance.
    `SemverFilter.version_parts` can contain up to 6 components, which should map
    to the columns defined in `Release.SEMVER_COLS`. If fewer components are
    included, then we will exclude later columns from the filter.
    `SemverFilter.package` is optional, and if included we will filter the `package`
    column using the provided value.
    `SemverFilter.operator` should be a Django field filter.

    Typically we build a `SemverFilter` via `sentry.search.events.filter.parse_semver`
    """
    qs = self.filter(
        organization_id=organization_id).annotate_prerelease_column()
    # Negated filters invert the whole condition by using .exclude().
    query_func = "exclude" if semver_filter.negated else "filter"

    if semver_filter.package:
        qs = getattr(qs, query_func)(package=semver_filter.package)
    if project_ids:
        qs = qs.filter(id__in=ReleaseProject.objects.filter(
            project_id__in=project_ids).values_list("release_id", flat=True))

    if semver_filter.version_parts:
        # Build a composite ROW(...) of the filter's version components so the
        # comparison uses SQL tuple-comparison semantics. String components are
        # wrapped in Value(); others are assumed to be expressions already.
        filter_func = Func(
            *(Value(part) if isinstance(part, str) else part
              for part in semver_filter.version_parts),
            function="ROW",
        )
        # Only compare as many semver columns as components were supplied.
        cols = self.model.SEMVER_COLS[:len(semver_filter.version_parts)]
        qs = qs.annotate(semver=Func(*(F(col) for col in cols),
                                     function="ROW",
                                     output_field=ArrayField()))
        qs = getattr(
            qs, query_func)(**{
                f"semver__{semver_filter.operator}": filter_func
            })
    return qs
class Identity(Model):
    """
    A verified link between a user and a third party identity.
    """

    __core__ = False

    idp = FlexibleForeignKey('sentry.IdentityProvider')
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL)
    external_id = models.CharField(max_length=64)
    data = EncryptedJsonField()
    status = BoundedPositiveIntegerField(default=IdentityStatus.UNKNOWN)
    scopes = ArrayField()
    date_verified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_identity'
        # An external id is unique per provider, and a user may hold at most
        # one identity per provider.
        unique_together = (('idp', 'external_id'), ('idp', 'user'))
class DashboardWidgetQuery(Model):
    """
    A query in a dashboard widget.
    """

    __core__ = True

    widget = FlexibleForeignKey("sentry.DashboardWidget")
    name = models.CharField(max_length=255)
    fields = ArrayField()
    conditions = models.TextField()
    # Position of this query within its widget; unique per widget.
    order = BoundedPositiveIntegerField()
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_dashboardwidgetquery"
        unique_together = (("widget", "order"), )

    # NOTE(review): "type" is not a field declared on this model — confirm
    # sane_repr tolerates missing attributes.
    __repr__ = sane_repr("widget", "type", "name")
class HasApiScopes(models.Model):
    """
    Mixin for models that hold a list of OAuth Scopes.
    """

    class Meta:
        abstract = True

    # List of scopes in bit form
    scopes = BitField(flags=ApiScopes().to_bitfield())

    # Human readable list of scopes
    scope_list = ArrayField(of=models.TextField)

    def get_scopes(self):
        """Return scope names, preferring the readable list over the bitfield."""
        if not self.scope_list:
            # Legacy path: decode set bits into their scope names.
            return [name for name, enabled in self.scopes.items() if enabled]
        return self.scope_list

    def has_scope(self, scope):
        """True if `scope` has been granted."""
        return scope in self.get_scopes()
class Identity(Model):
    """
    A verified link between a user and a third party identity.
    """

    __core__ = False

    idp = FlexibleForeignKey('sentry.IdentityProvider')
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL)
    external_id = models.CharField(max_length=64)
    data = EncryptedJsonField()
    status = BoundedPositiveIntegerField(default=IdentityStatus.UNKNOWN)
    scopes = ArrayField()
    date_verified = models.DateTimeField(default=timezone.now)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_identity'
        unique_together = (('idp', 'external_id'), ('idp', 'user'))

    def get_provider(self):
        # Resolve the provider implementation for this identity's idp type.
        # Local import — presumably avoids a circular import; confirm.
        from sentry.identity import get
        return get(self.idp.type)

    @classmethod
    def reattach(cls, idp, external_id, user, defaults):
        """
        Removes identities under `idp` associated with either `external_id`
        or `user` and creates a new identity linking them.
        """
        # Delete any rows that would collide with the new link under either
        # unique_together constraint: (idp, external_id) or (idp, user).
        lookup = Q(external_id=external_id) | Q(user=user)
        Identity.objects.filter(lookup, idp=idp).delete()

        return Identity.objects.create(
            idp=idp,
            user=user,
            external_id=external_id,
            **defaults
        )
class Release(Model): """ A release is generally created when a new version is pushed into a production state. """ __core__ = False organization = FlexibleForeignKey('sentry.Organization') projects = models.ManyToManyField('sentry.Project', related_name='releases', through=ReleaseProject) # DEPRECATED project_id = BoundedPositiveIntegerField(null=True) version = models.CharField(max_length=DB_VERSION_LENGTH) # ref might be the branch name being released ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True) url = models.URLField(null=True, blank=True) date_added = models.DateTimeField(default=timezone.now) # DEPRECATED - not available in UI or editable from API date_started = models.DateTimeField(null=True, blank=True) date_released = models.DateTimeField(null=True, blank=True) # arbitrary data recorded with the release data = JSONField(default={}) new_groups = BoundedPositiveIntegerField(default=0) # generally the release manager, or the person initiating the process owner = FlexibleForeignKey('sentry.User', null=True, blank=True, on_delete=models.SET_NULL) # materialized stats commit_count = BoundedPositiveIntegerField(null=True, default=0) last_commit_id = BoundedPositiveIntegerField(null=True) authors = ArrayField(null=True) total_deploys = BoundedPositiveIntegerField(null=True, default=0) last_deploy_id = BoundedPositiveIntegerField(null=True) class Meta: app_label = 'sentry' db_table = 'sentry_release' unique_together = (('organization', 'version'), ) __repr__ = sane_repr('organization_id', 'version') @staticmethod def is_valid_version(value): return not (any(c in value for c in BAD_RELEASE_CHARS) or value in ('.', '..') or not value or value.lower() == 'latest') @classmethod def get_cache_key(cls, organization_id, version): return 'release:3:%s:%s' % (organization_id, md5_text(version).hexdigest()) @classmethod def get_lock_key(cls, organization_id, release_id): return u'releasecommits:{}:{}'.format(organization_id, release_id) 
@classmethod def get(cls, project, version): cache_key = cls.get_cache_key(project.organization_id, version) release = cache.get(cache_key) if release is None: try: release = cls.objects.get( organization_id=project.organization_id, projects=project, version=version, ) except cls.DoesNotExist: release = -1 cache.set(cache_key, release, 300) if release == -1: return return release @classmethod def get_or_create(cls, project, version, date_added=None): from sentry.models import Project if date_added is None: date_added = timezone.now() cache_key = cls.get_cache_key(project.organization_id, version) release = cache.get(cache_key) if release in (None, -1): # TODO(dcramer): if the cache result is -1 we could attempt a # default create here instead of default get project_version = ('%s-%s' % (project.slug, version))[:DB_VERSION_LENGTH] releases = list( cls.objects.filter(organization_id=project.organization_id, version__in=[version, project_version], projects=project)) if releases: try: release = [ r for r in releases if r.version == project_version ][0] except IndexError: release = releases[0] else: try: with transaction.atomic(): release = cls.objects.create( organization_id=project.organization_id, version=version, date_added=date_added, total_deploys=0, ) except IntegrityError: release = cls.objects.get( organization_id=project.organization_id, version=version) release.add_project(project) if not project.flags.has_releases: project.flags.has_releases = True project.update( flags=F('flags').bitor(Project.flags.has_releases)) # TODO(dcramer): upon creating a new release, check if it should be # the new "latest release" for this project cache.set(cache_key, release, 3600) return release @classmethod def merge(cls, to_release, from_releases): # The following models reference release: # ReleaseCommit.release # ReleaseEnvironment.release_id # ReleaseProject.release # GroupRelease.release_id # GroupResolution.release # Group.first_release # ReleaseFile.release from 
sentry.models import (ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject, ReleaseProjectEnvironment, Group, GroupRelease, GroupResolution) model_list = (ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject, ReleaseProjectEnvironment, GroupRelease, GroupResolution) for release in from_releases: for model in model_list: if hasattr(model, 'release'): update_kwargs = {'release': to_release} else: update_kwargs = {'release_id': to_release.id} try: with transaction.atomic(): model.objects.filter(release_id=release.id).update( **update_kwargs) except IntegrityError: for item in model.objects.filter(release_id=release.id): try: with transaction.atomic(): model.objects.filter(id=item.id).update( **update_kwargs) except IntegrityError: item.delete() Group.objects.filter(first_release=release).update( first_release=to_release) release.delete() @property def short_version(self): return Release.get_display_version(self.version) @staticmethod def get_display_version(version): match = _dotted_path_prefix_re.match(version) if match is not None: version = version[match.end():] if _sha1_re.match(version): return version[:7] return version def add_dist(self, name, date_added=None): from sentry.models import Distribution if date_added is None: date_added = timezone.now() return Distribution.objects.get_or_create(release=self, name=name, defaults={ 'date_added': date_added, 'organization_id': self.organization_id, })[0] def get_dist(self, name): from sentry.models import Distribution try: return Distribution.objects.get(name=name, release=self) except Distribution.DoesNotExist: pass def add_project(self, project): """ Add a project to this release. Returns True if the project was added and did not already exist. 
""" from sentry.models import Project try: with transaction.atomic(): ReleaseProject.objects.create(project=project, release=self) if not project.flags.has_releases: project.flags.has_releases = True project.update(flags=F('flags').bitor( Project.flags.has_releases), ) except IntegrityError: return False else: return True def handle_commit_ranges(self, refs): """ Takes commit refs of the form: [ { 'previousCommit': None, 'commit': 'previous_commit..commit', } ] Note: Overwrites 'previousCommit' and 'commit' """ for ref in refs: if COMMIT_RANGE_DELIMITER in ref['commit']: ref['previousCommit'], ref['commit'] = ref['commit'].split( COMMIT_RANGE_DELIMITER) def set_refs(self, refs, user, fetch=False): from sentry.api.exceptions import InvalidRepository from sentry.models import Commit, ReleaseHeadCommit, Repository from sentry.tasks.commits import fetch_commits # TODO: this does the wrong thing unless you are on the most # recent release. Add a timestamp compare? prev_release = type(self).objects.filter( organization_id=self.organization_id, projects__in=self.projects.all(), ).extra(select={ 'sort': 'COALESCE(date_released, date_added)', }).exclude(version=self.version).order_by('-sort').first() names = {r['repository'] for r in refs} repos = list( Repository.objects.filter( organization_id=self.organization_id, name__in=names, )) repos_by_name = {r.name: r for r in repos} invalid_repos = names - set(repos_by_name.keys()) if invalid_repos: raise InvalidRepository('Invalid repository names: %s' % ','.join(invalid_repos)) self.handle_commit_ranges(refs) for ref in refs: repo = repos_by_name[ref['repository']] commit = Commit.objects.get_or_create( organization_id=self.organization_id, repository_id=repo.id, key=ref['commit'], )[0] # update head commit for repo/release if exists ReleaseHeadCommit.objects.create_or_update( organization_id=self.organization_id, repository_id=repo.id, release=self, values={ 'commit': commit, }) if fetch: fetch_commits.apply_async( kwargs={ 
'release_id': self.id, 'user_id': user.id, 'refs': refs, 'prev_release_id': prev_release and prev_release.id, }) def set_commits(self, commit_list): """ Bind a list of commits to this release. This will clear any existing commit log and replace it with the given commits. """ # Sort commit list in reverse order commit_list.sort(key=lambda commit: commit.get('timestamp'), reverse=True) # TODO(dcramer): this function could use some cleanup/refactoring as its a bit unwieldly from sentry.models import (Commit, CommitAuthor, Group, GroupLink, GroupResolution, GroupStatus, ReleaseCommit, ReleaseHeadCommit, Repository, PullRequest) from sentry.plugins.providers.repository import RepositoryProvider from sentry.tasks.integrations import kick_off_status_syncs # todo(meredith): implement for IntegrationRepositoryProvider commit_list = [ c for c in commit_list if not RepositoryProvider.should_ignore_commit(c.get('message', '')) ] lock_key = type(self).get_lock_key(self.organization_id, self.id) lock = locks.get(lock_key, duration=10) with TimedRetryPolicy(10)(lock.acquire): start = time() with transaction.atomic(): # TODO(dcramer): would be good to optimize the logic to avoid these # deletes but not overly important ReleaseCommit.objects.filter(release=self, ).delete() authors = {} repos = {} commit_author_by_commit = {} head_commit_by_repo = {} latest_commit = None for idx, data in enumerate(commit_list): repo_name = data.get( 'repository') or u'organization-{}'.format( self.organization_id) if repo_name not in repos: repos[ repo_name] = repo = Repository.objects.get_or_create( organization_id=self.organization_id, name=repo_name, )[0] else: repo = repos[repo_name] author_email = data.get('author_email') if author_email is None and data.get('author_name'): author_email = (re.sub(r'[^a-zA-Z0-9\-_\.]*', '', data['author_name']).lower() + '@localhost') if not author_email: author = None elif author_email not in authors: author_data = {'name': data.get('author_name')} author, 
created = CommitAuthor.objects.create_or_update( organization_id=self.organization_id, email=author_email, values=author_data) if not created: author = CommitAuthor.objects.get( organization_id=self.organization_id, email=author_email) authors[author_email] = author else: author = authors[author_email] commit_data = {} defaults = {} # Update/set message and author if they are provided. if author is not None: commit_data['author'] = author if 'message' in data: commit_data['message'] = data['message'] if 'timestamp' in data: commit_data['date_added'] = data['timestamp'] else: defaults['date_added'] = timezone.now() commit, created = Commit.objects.create_or_update( organization_id=self.organization_id, repository_id=repo.id, key=data['id'], defaults=defaults, values=commit_data) if not created: commit = Commit.objects.get( organization_id=self.organization_id, repository_id=repo.id, key=data['id']) if author is None: author = commit.author commit_author_by_commit[commit.id] = author patch_set = data.get('patch_set', []) for patched_file in patch_set: try: with transaction.atomic(): CommitFileChange.objects.create( organization_id=self.organization.id, commit=commit, filename=patched_file['path'], type=patched_file['type'], ) except IntegrityError: pass try: with transaction.atomic(): ReleaseCommit.objects.create( organization_id=self.organization_id, release=self, commit=commit, order=idx, ) except IntegrityError: pass if latest_commit is None: latest_commit = commit head_commit_by_repo.setdefault(repo.id, commit.id) self.update( commit_count=len(commit_list), authors=[ six.text_type(a_id) for a_id in ReleaseCommit.objects.filter( release=self, commit__author_id__isnull=False, ).values_list('commit__author_id', flat=True).distinct() ], last_commit_id=latest_commit.id if latest_commit else None, ) metrics.timing('release.set_commits.duration', time() - start) # fill any missing ReleaseHeadCommit entries for repo_id, commit_id in six.iteritems(head_commit_by_repo): 
try: with transaction.atomic(): ReleaseHeadCommit.objects.create( organization_id=self.organization_id, release_id=self.id, repository_id=repo_id, commit_id=commit_id, ) except IntegrityError: pass release_commits = list( ReleaseCommit.objects.filter( release=self).select_related('commit').values( 'commit_id', 'commit__key')) commit_resolutions = list( GroupLink.objects.filter( linked_type=GroupLink.LinkedType.commit, linked_id__in=[rc['commit_id'] for rc in release_commits], ).values_list('group_id', 'linked_id')) commit_group_authors = [ ( cr[0], # group_id commit_author_by_commit.get(cr[1])) for cr in commit_resolutions ] pr_ids_by_merge_commit = list( PullRequest.objects.filter( merge_commit_sha__in=[ rc['commit__key'] for rc in release_commits ], organization_id=self.organization_id, ).values_list('id', flat=True)) pull_request_resolutions = list( GroupLink.objects.filter( relationship=GroupLink.Relationship.resolves, linked_type=GroupLink.LinkedType.pull_request, linked_id__in=pr_ids_by_merge_commit, ).values_list('group_id', 'linked_id')) pr_authors = list( PullRequest.objects.filter(id__in=[ prr[1] for prr in pull_request_resolutions ], ).select_related('author')) pr_authors_dict = {pra.id: pra.author for pra in pr_authors} pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1])) for prr in pull_request_resolutions] user_by_author = {None: None} commits_and_prs = list( itertools.chain(commit_group_authors, pull_request_group_authors), ) group_project_lookup = dict( Group.objects.filter(id__in=[ group_id for group_id, _ in commits_and_prs ], ).values_list('id', 'project_id')) for group_id, author in commits_and_prs: if author not in user_by_author: try: user_by_author[author] = author.find_users()[0] except IndexError: user_by_author[author] = None actor = user_by_author[author] with transaction.atomic(): GroupResolution.objects.create_or_update( group_id=group_id, values={ 'release': self, 'type': GroupResolution.Type.in_release, 'status': 
GroupResolution.Status.resolved, 'actor_id': actor.id if actor else None, }, ) group = Group.objects.get(id=group_id, ) group.update(status=GroupStatus.RESOLVED) metrics.incr('group.resolved', instance='in_commit', skip_internal=True) issue_resolved.send_robust( organization_id=self.organization_id, user=actor, group=group, project=group.project, resolution_type='with_commit', sender=type(self), ) kick_off_status_syncs.apply_async( kwargs={ 'project_id': group_project_lookup[group_id], 'group_id': group_id, })
class ApiKey(Model):
    """An organization-scoped API key with a bitfield/array of OAuth-style scopes."""

    __core__ = True

    organization = FlexibleForeignKey("sentry.Organization", related_name="key_set")
    label = models.CharField(max_length=64, blank=True, default="Default")
    # 32-char hex token (uuid4().hex), generated lazily in save() if unset.
    key = models.CharField(max_length=32, unique=True)
    # Legacy bitfield of scopes; superseded by scope_list (see get_scopes()).
    scopes = BitField(
        flags=(
            ("project:read", "project:read"),
            ("project:write", "project:write"),
            ("project:admin", "project:admin"),
            ("project:releases", "project:releases"),
            ("team:read", "team:read"),
            ("team:write", "team:write"),
            ("team:admin", "team:admin"),
            ("event:read", "event:read"),
            ("event:write", "event:write"),
            ("event:admin", "event:admin"),
            ("org:read", "org:read"),
            ("org:write", "org:write"),
            ("org:admin", "org:admin"),
            ("member:read", "member:read"),
            ("member:write", "member:write"),
            ("member:admin", "member:admin"),
        )
    )
    # Preferred scope storage; when non-empty it takes precedence over `scopes`.
    scope_list = ArrayField(of=models.TextField)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=((ApiKeyStatus.ACTIVE, _("Active")), (ApiKeyStatus.INACTIVE, _("Inactive"))),
        db_index=True,
    )
    date_added = models.DateTimeField(default=timezone.now)
    # Newline-separated list of allowed CORS origins (see get_allowed_origins()).
    allowed_origins = models.TextField(blank=True, null=True)

    objects = BaseManager(cache_fields=("key",))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_apikey"

    __repr__ = sane_repr("organization_id", "key")

    def __unicode__(self):
        return six.text_type(self.key)

    @classmethod
    def generate_api_key(cls):
        """Return a fresh random 32-char hex key."""
        return uuid4().hex

    @property
    def is_active(self):
        return self.status == ApiKeyStatus.ACTIVE

    def save(self, *args, **kwargs):
        # Auto-generate a key on first save so callers never need to supply one.
        if not self.key:
            self.key = ApiKey.generate_api_key()
        super(ApiKey, self).save(*args, **kwargs)

    def get_allowed_origins(self):
        """Return the non-empty origin entries, split on newlines.

        NOTE(review): under Python 3 `filter()` returns a lazy iterator, not a
        list; callers presumably iterate once — confirm if list semantics are
        required.
        """
        if not self.allowed_origins:
            return []
        return filter(bool, self.allowed_origins.split("\n"))

    def get_audit_log_data(self):
        """Return a dict snapshot of this key for audit-log entries."""
        return {
            "label": self.label,
            "key": self.key,
            "scopes": self.get_scopes(),
            "status": self.status,
        }

    def get_scopes(self):
        """Return scopes from scope_list if set, else the enabled bitfield flags."""
        if self.scope_list:
            return self.scope_list
        return [k for k, v in six.iteritems(self.scopes) if v]

    def has_scope(self, scope):
        return scope in self.get_scopes()
class ApiKey(Model):
    """An organization-scoped API key with a bitfield/array of OAuth-style scopes."""

    __core__ = True

    organization = FlexibleForeignKey('sentry.Organization', related_name='key_set')
    label = models.CharField(max_length=64, blank=True, default='Default')
    # 32-char hex token (uuid4().hex), generated lazily in save() if unset.
    key = models.CharField(max_length=32, unique=True)
    # Legacy bitfield of scopes; superseded by scope_list (see get_scopes()).
    scopes = BitField(flags=(
        ('project:read', 'project:read'),
        ('project:write', 'project:write'),
        ('project:admin', 'project:admin'),
        ('project:releases', 'project:releases'),
        ('team:read', 'team:read'),
        ('team:write', 'team:write'),
        ('team:admin', 'team:admin'),
        ('event:read', 'event:read'),
        ('event:write', 'event:write'),
        ('event:admin', 'event:admin'),
        ('org:read', 'org:read'),
        ('org:write', 'org:write'),
        ('org:admin', 'org:admin'),
        ('member:read', 'member:read'),
        ('member:write', 'member:write'),
        ('member:admin', 'member:admin'),
    ))
    # Preferred scope storage; when non-empty it takes precedence over `scopes`.
    scope_list = ArrayField(of=models.TextField)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (ApiKeyStatus.ACTIVE, _('Active')),
        (ApiKeyStatus.INACTIVE, _('Inactive')),
    ), db_index=True)
    date_added = models.DateTimeField(default=timezone.now)
    # Newline-separated list of allowed CORS origins (see get_allowed_origins()).
    allowed_origins = models.TextField(blank=True, null=True)

    objects = BaseManager(cache_fields=('key', ))

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_apikey'

    __repr__ = sane_repr('organization_id', 'key')

    def __unicode__(self):
        return six.text_type(self.key)

    @classmethod
    def generate_api_key(cls):
        """Return a fresh random 32-char hex key."""
        return uuid4().hex

    @property
    def is_active(self):
        return self.status == ApiKeyStatus.ACTIVE

    def save(self, *args, **kwargs):
        # Auto-generate a key on first save so callers never need to supply one.
        if not self.key:
            self.key = ApiKey.generate_api_key()
        super(ApiKey, self).save(*args, **kwargs)

    def get_allowed_origins(self):
        """Return the non-empty origin entries, split on newlines.

        NOTE(review): under Python 3 `filter()` returns a lazy iterator, not a
        list; callers presumably iterate once — confirm if list semantics are
        required.
        """
        if not self.allowed_origins:
            return []
        return filter(bool, self.allowed_origins.split('\n'))

    def get_audit_log_data(self):
        """Return a dict snapshot of this key for audit-log entries."""
        return {
            'label': self.label,
            'key': self.key,
            'scopes': self.get_scopes(),
            'status': self.status,
        }

    def get_scopes(self):
        """Return scopes from scope_list if set, else the enabled bitfield flags."""
        if self.scope_list:
            return self.scope_list
        return [k for k, v in six.iteritems(self.scopes) if v]

    def has_scope(self, scope):
        return scope in self.get_scopes()
class SentryApp(ParanoidModel, HasApiScopes):
    """A third-party integration application (Sentry App) owned by an organization."""

    __core__ = True

    application = models.OneToOneField("sentry.ApiApplication",
                                       null=True,
                                       on_delete=models.SET_NULL,
                                       related_name="sentry_app")

    # Much of the OAuth system in place currently depends on a User existing.
    # This "proxy user" represents the SentryApp in those cases.
    proxy_user = models.OneToOneField("sentry.User",
                                      null=True,
                                      on_delete=models.SET_NULL,
                                      related_name="sentry_app")

    # The Organization the Sentry App was created in "owns" it. Members of that
    # Org have differing access, dependent on their role within the Org.
    owner = FlexibleForeignKey("sentry.Organization",
                               related_name="owned_sentry_apps")

    name = models.TextField()
    slug = models.CharField(max_length=SENTRY_APP_SLUG_MAX_LENGTH, unique=True)
    author = models.TextField(null=True)
    status = BoundedPositiveIntegerField(default=SentryAppStatus.UNPUBLISHED,
                                         choices=SentryAppStatus.as_choices(),
                                         db_index=True)
    uuid = models.CharField(max_length=64, default=default_uuid)

    redirect_url = models.URLField(null=True)
    webhook_url = models.URLField(null=True)
    # does the application subscribe to `event.alert`,
    # meaning can it be used in alert rules as a {service} ?
    is_alertable = models.BooleanField(default=False)

    # does the application need to wait for verification
    # on behalf of the external service to know if its installations
    # are successfully installed ?
    verify_install = models.BooleanField(default=True)

    # Webhook event subscriptions (e.g. issue/error resources).
    events = ArrayField(of=models.TextField, null=True)

    overview = models.TextField(null=True)
    schema = EncryptedJsonField(default=dict)

    date_added = models.DateTimeField(default=timezone.now)
    # Touched on every save() — see save() below.
    date_updated = models.DateTimeField(default=timezone.now)
    date_published = models.DateTimeField(null=True, blank=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_sentryapp"

    @classmethod
    def visible_for_user(cls, request):
        """Return all apps for active superusers, otherwise only published ones."""
        from sentry.auth.superuser import is_active_superuser

        if is_active_superuser(request):
            return cls.objects.all()

        return cls.objects.filter(status=SentryAppStatus.PUBLISHED)

    @property
    def is_published(self):
        return self.status == SentryAppStatus.PUBLISHED

    @property
    def is_unpublished(self):
        return self.status == SentryAppStatus.UNPUBLISHED

    @property
    def is_internal(self):
        return self.status == SentryAppStatus.INTERNAL

    @property
    def slug_for_metrics(self):
        """Return a low-cardinality slug for metrics: bucket non-published apps."""
        if self.is_internal:
            return "internal"
        if self.is_unpublished:
            return "unpublished"
        return self.slug

    def save(self, *args, **kwargs):
        # Keep date_updated current on every write.
        self.date_updated = timezone.now()
        return super(SentryApp, self).save(*args, **kwargs)

    def is_installed_on(self, organization):
        """Return True if *this* app is installed on the given organization.

        BUGFIX: the previous query filtered only by ``organization``, so it
        answered "does this org have ANY Sentry App installed" rather than
        whether this particular app is installed. Scope the query to this app.
        """
        return SentryAppInstallation.objects.filter(
            organization=organization,
            sentry_app=self,
        ).exists()

    def build_signature(self, body):
        """Return the hex HMAC-SHA256 of *body* keyed by the app's client secret."""
        secret = self.application.client_secret
        return hmac.new(key=secret.encode("utf-8"),
                        msg=body.encode("utf-8"),
                        digestmod=sha256).hexdigest()

    def show_auth_info(self, access):
        """True when every scope this app requires is granted on *access*."""
        encoded_scopes = set({u"%s" % scope for scope in list(access.scopes)})
        return set(self.scope_list).issubset(encoded_scopes)
class SentryApp(ParanoidModel, HasApiScopes):
    """A third-party integration application (Sentry App) owned by an organization."""

    __core__ = True

    application = models.OneToOneField("sentry.ApiApplication",
                                       null=True,
                                       on_delete=models.SET_NULL,
                                       related_name="sentry_app")

    # Much of the OAuth system in place currently depends on a User existing.
    # This "proxy user" represents the SentryApp in those cases.
    proxy_user = models.OneToOneField("sentry.User",
                                      null=True,
                                      on_delete=models.SET_NULL,
                                      related_name="sentry_app")

    # The Organization the Sentry App was created in "owns" it. Members of that
    # Org have differing access, dependent on their role within the Org.
    owner = FlexibleForeignKey("sentry.Organization",
                               related_name="owned_sentry_apps")

    name = models.TextField()
    slug = models.CharField(max_length=SENTRY_APP_SLUG_MAX_LENGTH, unique=True)
    author = models.TextField(null=True)
    status = BoundedPositiveIntegerField(default=SentryAppStatus.UNPUBLISHED,
                                         choices=SentryAppStatus.as_choices(),
                                         db_index=True)
    uuid = models.CharField(max_length=64, default=default_uuid)

    redirect_url = models.URLField(null=True)
    webhook_url = models.URLField()
    # does the application subscribe to `event.alert`,
    # meaning can it be used in alert rules as a {service} ?
    is_alertable = models.BooleanField(default=False)

    # does the application need to wait for verification
    # on behalf of the external service to know if its installations
    # are successfully installed ?
    verify_install = models.BooleanField(default=True)

    # Webhook event subscriptions (e.g. issue/error resources).
    events = ArrayField(of=models.TextField, null=True)

    overview = models.TextField(null=True)
    schema = EncryptedJsonField(default=dict)

    date_added = models.DateTimeField(default=timezone.now)
    # Touched on every save() — see save() below.
    date_updated = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_sentryapp"

    @classmethod
    def visible_for_user(cls, request):
        """Return all apps for active superusers, otherwise only published ones."""
        from sentry.auth.superuser import is_active_superuser

        if is_active_superuser(request):
            return cls.objects.all()

        return cls.objects.filter(status=SentryAppStatus.PUBLISHED)

    @property
    def organizations(self):
        """Organizations this app is installed on (empty queryset when unsaved)."""
        if not self.pk:
            return Organization.objects.none()

        return Organization.objects.select_related(
            "sentry_app_installations").filter(
                sentry_app_installations__sentry_app_id=self.id)

    @property
    def teams(self):
        """Teams belonging to any organization this app is installed on."""
        from sentry.models import Team

        if not self.pk:
            return Team.objects.none()

        return Team.objects.filter(organization__in=self.organizations)

    @property
    def is_published(self):
        return self.status == SentryAppStatus.PUBLISHED

    @property
    def is_unpublished(self):
        return self.status == SentryAppStatus.UNPUBLISHED

    @property
    def is_internal(self):
        return self.status == SentryAppStatus.INTERNAL

    def save(self, *args, **kwargs):
        # Derive the slug (once) and keep date_updated current on every write.
        self._set_slug()
        self.date_updated = timezone.now()
        return super(SentryApp, self).save(*args, **kwargs)

    def is_installed_on(self, organization):
        """Return True if this app is installed on the given organization."""
        return self.organizations.filter(pk=organization.pk).exists()

    def _set_slug(self):
        """Derive ``slug`` from ``name`` (lowercase, dash form) if not already set.

        For example, a name of 'My Cool App' produces the slug 'my-cool-app'.
        Internal apps additionally get a 6-hex-char suffix derived from the
        owner's slug so they don't collide with the public namespace.
        """
        if not self.slug:
            self.slug = slugify(self.name)

        if self.is_internal and not self._has_internal_slug():
            # NOTE(review): hashlib.sha1 requires bytes under Python 3; this
            # assumes a Python 2 str owner slug — confirm before porting.
            self.slug = u"{}-{}".format(
                self.slug,
                hashlib.sha1(self.owner.slug).hexdigest()[0:6])

    def _has_internal_slug(self):
        # Matches "<slug>-<hexsuffix>" as produced by _set_slug for internal apps.
        return re.match(r"\w+-[0-9a-zA-Z]+", self.slug)

    def build_signature(self, body):
        """Return the hex HMAC-SHA256 of *body* keyed by the app's client secret."""
        secret = self.application.client_secret
        return hmac.new(key=secret.encode("utf-8"),
                        msg=body.encode("utf-8"),
                        digestmod=sha256).hexdigest()
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    A commit is generally a git commit. See also releasecommit.py
    """

    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    projects = models.ManyToManyField("sentry.Project",
                                      related_name="releases",
                                      through=ReleaseProject)
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    data = JSONField(default={})
    # new issues (groups) that arise as a consequence of this release
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey("sentry.User",
                               null=True,
                               blank=True,
                               on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True, default=0)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True, default=0)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    # HACK HACK HACK
    # As a transitionary step we permit release rows to exist multiple times
    # where they are "specialized" for a specific project. The goal is to
    # later split up releases by project again. This is for instance used
    # by the org release listing.
    _for_project_id = None

    class Meta:
        app_label = "sentry"
        db_table = "sentry_release"
        unique_together = (("organization", "version"), )

    __repr__ = sane_repr("organization_id", "version")

    def __eq__(self, other):
        """Make sure that specialized releases are only comparable to the same
        other specialized release. This for instance lets us treat them
        separately for serialization purposes.
        """
        return Model.__eq__(
            self, other) and self._for_project_id == other._for_project_id

    @staticmethod
    def is_valid_version(value):
        """Reject empty versions, forbidden characters, '.'/'..', and 'latest'."""
        return not (not value or any(c in value for c in BAD_RELEASE_CHARS)
                    or value in (".", "..") or value.lower() == "latest")

    @classmethod
    def get_cache_key(cls, organization_id, version):
        """Cache key for a release lookup; the version is hashed to bound length."""
        return "release:3:%s:%s" % (organization_id,
                                    md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        """Distributed-lock key guarding commit binding (see set_commits)."""
        return u"releasecommits:{}:{}".format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """Fetch a release by project+version through a 5-minute cache.

        A miss is negatively cached as -1 so repeated misses don't hit the DB.
        Returns None when the release does not exist.
        """
        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id,
                    projects=project,
                    version=version)
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)

        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """Timed wrapper around _get_or_create_impl (records cache/created tags)."""
        with metrics.timer("models.release.get_or_create") as metric_tags:
            return cls._get_or_create_impl(project, version, date_added,
                                           metric_tags)

    @classmethod
    def _get_or_create_impl(cls, project, version, date_added, metric_tags):
        """Get or create a release for project+version, preferring an existing
        project-specialized row ("<slug>-<version>") over the plain version.

        Handles the create race via IntegrityError fallback and caches the
        result for an hour.
        """
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)

        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ("%s-%s" %
                               (project.slug, version))[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(
                    organization_id=project.organization_id,
                    version__in=[version, project_version],
                    projects=project,
                ))

            if releases:
                # Prefer the project-specialized row when both forms exist.
                try:
                    release = [
                        r for r in releases if r.version == project_version
                    ][0]
                except IndexError:
                    release = releases[0]
                metric_tags["created"] = "false"
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )

                    metric_tags["created"] = "true"
                except IntegrityError:
                    # Lost the create race: another writer made it first.
                    metric_tags["created"] = "false"
                    release = cls.objects.get(
                        organization_id=project.organization_id,
                        version=version)

                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F("flags").bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)
            metric_tags["cache_hit"] = "false"
        else:
            metric_tags["cache_hit"] = "true"

        return release

    @cached_property
    def version_info(self):
        """Parsed form of self.version, or None for unparseable legacy versions."""
        try:
            return parse_release(self.version)
        except RelayError:
            # This can happen on invalid legacy releases
            return None

    @classmethod
    def merge(cls, to_release, from_releases):
        """Re-point all rows referencing each of *from_releases* at *to_release*,
        then delete the merged-from releases. Per-row fallback handles unique
        constraint collisions (the duplicate row is dropped).
        """
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (
            ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject,
            ReleaseProjectEnvironment, Group, GroupRelease, GroupResolution,
        )

        model_list = (
            ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject,
            ReleaseProjectEnvironment, GroupRelease, GroupResolution,
        )
        for release in from_releases:
            for model in model_list:
                if hasattr(model, "release"):
                    update_kwargs = {"release": to_release}
                else:
                    update_kwargs = {"release_id": to_release.id}
                try:
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(
                            **update_kwargs)
                except IntegrityError:
                    # Bulk update collided with a unique constraint; retry
                    # row-by-row and drop rows that already exist on the target.
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(
                                    **update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(
                first_release=to_release)

            release.delete()

    def add_dist(self, name, date_added=None):
        """Get or create the Distribution *name* bound to this release."""
        from sentry.models import Distribution
        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={
                "date_added": date_added,
                "organization_id": self.organization_id
            },
        )[0]

    def get_dist(self, name):
        """Return the Distribution *name* for this release, or None."""
        from sentry.models import Distribution
        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project
        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F("flags").bitor(Project.flags.has_releases))
        except IntegrityError:
            # Already linked; the unique constraint on (project, release) fired.
            return False
        else:
            return True

    def handle_commit_ranges(self, refs):
        """
        Takes commit refs of the form:
        [
            {
                'previousCommit': None,
                'commit': 'previous_commit..commit',
            }
        ]
        Note: Overwrites 'previousCommit' and 'commit'
        """
        for ref in refs:
            if COMMIT_RANGE_DELIMITER in ref["commit"]:
                ref["previousCommit"], ref["commit"] = ref["commit"].split(
                    COMMIT_RANGE_DELIMITER)

    def set_refs(self, refs, user, fetch=False):
        """Record head commits per repository for this release and optionally
        kick off async commit fetching against the previous release.

        Raises InvalidRepository when a ref names an unknown repository.
        """
        with sentry_sdk.start_span(op="set_refs"):
            from sentry.api.exceptions import InvalidRepository
            from sentry.models import Commit, ReleaseHeadCommit, Repository
            from sentry.tasks.commits import fetch_commits

            # TODO: this does the wrong thing unless you are on the most
            # recent release.  Add a timestamp compare?
            prev_release = (type(self).objects.filter(
                organization_id=self.organization_id,
                projects__in=self.projects.all()).extra(
                    select={
                        "sort": "COALESCE(date_released, date_added)"
                    }).exclude(version=self.version).order_by("-sort").first())

            names = {r["repository"] for r in refs}
            repos = list(
                Repository.objects.filter(organization_id=self.organization_id,
                                          name__in=names))
            repos_by_name = {r.name: r for r in repos}
            invalid_repos = names - set(repos_by_name.keys())
            if invalid_repos:
                raise InvalidRepository("Invalid repository names: %s" %
                                        ",".join(invalid_repos))

            self.handle_commit_ranges(refs)

            for ref in refs:
                repo = repos_by_name[ref["repository"]]
                commit = Commit.objects.get_or_create(
                    organization_id=self.organization_id,
                    repository_id=repo.id,
                    key=ref["commit"])[0]
                # update head commit for repo/release if exists
                ReleaseHeadCommit.objects.create_or_update(
                    organization_id=self.organization_id,
                    repository_id=repo.id,
                    release=self,
                    values={"commit": commit},
                )
            if fetch:
                fetch_commits.apply_async(
                    kwargs={
                        "release_id": self.id,
                        "user_id": user.id,
                        "refs": refs,
                        "prev_release_id": prev_release and prev_release.id,
                    })

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """
        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get("timestamp", 0),
                         reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (
            Commit, CommitAuthor, Group, GroupLink, GroupResolution,
            GroupStatus, ReleaseCommit, ReleaseHeadCommit, Repository,
            PullRequest,
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs
        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get("message", ""))
        ]
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        # Distributed lock so concurrent writers can't interleave commit binding.
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    # Commits without a repository fall into a per-org bucket.
                    repo_name = data.get(
                        "repository") or u"organization-{}".format(
                            self.organization_id)
                    if repo_name not in repos:
                        repos[
                            repo_name] = repo = Repository.objects.get_or_create(
                                organization_id=self.organization_id,
                                name=repo_name)[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get("author_email")
                    if author_email is None and data.get("author_name"):
                        # Synthesize a stable fake email from the author name.
                        author_email = (re.sub(r"[^a-zA-Z0-9\-_\.]*", "",
                                               data["author_name"]).lower() +
                                        "@localhost")

                    # Keep within CommitAuthor.email's column length.
                    author_email = truncatechars(author_email, 75)

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {"name": data.get("author_name")}
                        author, created = CommitAuthor.objects.get_or_create(
                            organization_id=self.organization_id,
                            email=author_email,
                            defaults=author_data,
                        )
                        if author.name != author_data["name"]:
                            author.update(name=author_data["name"])
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data["author"] = author
                    if "message" in data:
                        commit_data["message"] = data["message"]
                    if "timestamp" in data:
                        commit_data["date_added"] = data["timestamp"]

                    commit, created = Commit.objects.get_or_create(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data["id"],
                        defaults=commit_data,
                    )
                    if not created:
                        # Only write fields that actually changed.
                        commit_data = {
                            key: value
                            for key, value in six.iteritems(commit_data)
                            if getattr(commit, key) != value
                        }
                        if commit_data:
                            commit.update(**commit_data)

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    # Guard against patch_set being None
                    patch_set = data.get("patch_set") or []
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file["path"],
                                    type=patched_file["type"],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    # commit_list is sorted newest-first, so the first one wins.
                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self, commit__author_id__isnull=False).
                        values_list("commit__author_id", flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
            metrics.timing("release.set_commits.duration", time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(
                release=self).select_related("commit").values(
                    "commit_id", "commit__key"))

        # Groups linked to any of this release's commits (fix-in-commit flow).
        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc["commit_id"] for rc in release_commits],
            ).values_list("group_id", "linked_id"))

        commit_group_authors = [
            (cr[0], commit_author_by_commit.get(cr[1]))
            for cr in commit_resolutions  # group_id
        ]

        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[
                    rc["commit__key"] for rc in release_commits
                ],
                organization_id=self.organization_id,
            ).values_list("id", flat=True))

        # Groups resolved via pull requests whose merge commit is in this release.
        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list("group_id", "linked_id"))

        pr_authors = list(
            PullRequest.objects.filter(
                id__in=[prr[1] for prr in pull_request_resolutions
                        ]).select_related("author"))
        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        user_by_author = {None: None}

        commits_and_prs = list(
            itertools.chain(commit_group_authors, pull_request_group_authors))

        group_project_lookup = dict(
            Group.objects.filter(
                id__in=[group_id for group_id, _ in commits_and_prs
                        ]).values_list("id", "project_id"))

        for group_id, author in commits_and_prs:
            # Resolve each commit/PR author to at most one Sentry user (memoized).
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]
            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        "release": self,
                        "type": GroupResolution.Type.in_release,
                        "status": GroupResolution.Status.resolved,
                        "actor_id": actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                remove_group_from_inbox(group)
                metrics.incr("group.resolved",
                             instance="in_commit",
                             skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type="with_commit",
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={
                    "project_id": group_project_lookup[group_id],
                    "group_id": group_id
                })

    def safe_delete(self):
        """Deletes a release if possible or raises a `UnsafeReleaseDeletion`
        exception.
        """
        from sentry.models import Group, ReleaseFile
        from sentry.snuba.sessions import check_has_health_data

        # we don't want to remove the first_release metadata on the Group, and
        # while people might want to kill a release (maybe to remove files),
        # removing the release is prevented
        if Group.objects.filter(first_release=self).exists():
            raise UnsafeReleaseDeletion(ERR_RELEASE_REFERENCED)

        # We do not allow releases with health data to be deleted because
        # the upserting from snuba data would create the release again.
        # We would need to be able to delete this data from snuba which we
        # can't do yet.
        project_ids = list(self.projects.values_list("id").all())
        if check_has_health_data([(p[0], self.version) for p in project_ids]):
            raise UnsafeReleaseDeletion(ERR_RELEASE_HEALTH_DATA)

        # TODO(dcramer): this needs to happen in the queue as it could be a long
        # and expensive operation
        file_list = ReleaseFile.objects.filter(
            release=self).select_related("file")
        for releasefile in file_list:
            releasefile.file.delete()
            releasefile.delete()

        self.delete()