class ExportedData(Model):
    """
    Stores references to asynchronous data export jobs being stored
    in the Google Cloud Platform temporary storage solution.
    """

    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL)
    # No DB constraint: the File row may disappear independently of this record.
    file = FlexibleForeignKey("sentry.File", null=True, db_constraint=False)
    date_added = models.DateTimeField(default=timezone.now)
    date_finished = models.DateTimeField(null=True)  # set when the export job completes
    date_expired = models.DateTimeField(null=True)  # set when the stored file lapses
    query_type = BoundedPositiveIntegerField(choices=ExportQueryType.as_choices())
    query_info = JSONField()

    @property
    def status(self):
        """Derive the export's lifecycle state from its timestamps.

        Returns ExportStatus.Early while the job is still running,
        ExportStatus.Expired once the stored file has lapsed, and
        ExportStatus.Valid otherwise.
        """
        if self.date_finished is None:
            return ExportStatus.Early
        # Bug fix: date_expired is a nullable field; comparing None against a
        # datetime raises TypeError. Treat a missing expiry as "not expired".
        elif self.date_expired is not None and self.date_expired < timezone.now():
            return ExportStatus.Expired
        else:
            return ExportStatus.Valid

    class Meta:
        app_label = "sentry"
        db_table = "sentry_exporteddata"

    __repr__ = sane_repr("data_id")
class ExternalIssue(Model):
    """An issue living in an external tracker (e.g. Jira), linked through an integration."""

    __include_in_export__ = False

    # The foreign key here is an `int`, not `bigint`.
    organization = FlexibleForeignKey("sentry.Organization", db_constraint=False)
    # The foreign key here is an `int`, not `bigint`.
    integration = FlexibleForeignKey("sentry.Integration", db_constraint=False)
    key = models.CharField(max_length=128)  # example APP-123 in jira
    date_added = models.DateTimeField(default=timezone.now)
    title = models.TextField(null=True)
    description = models.TextField(null=True)
    metadata = JSONField(null=True)

    objects = ExternalIssueManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_externalissue"
        unique_together = (("organization", "integration", "key"), )

    __repr__ = sane_repr("organization_id", "integration_id", "key")

    def get_installation(self) -> Any:
        """Fetch the Integration row for this issue and return its installation
        scoped to this issue's organization."""
        from sentry.models import Integration

        integration = Integration.objects.get(id=self.integration_id)
        return integration.get_installation(organization_id=self.organization_id)
class Widget(Model):
    """
    A dashboard widget.
    """
    __core__ = True

    dashboard = FlexibleForeignKey('sentry.Dashboard')
    order = BoundedPositiveIntegerField()
    title = models.CharField(max_length=255)
    display_type = BoundedPositiveIntegerField(choices=WidgetDisplayTypes.as_choices())
    # Bug fix: use the `dict` callable rather than a literal `{}` default —
    # a literal would be one shared mutable dict across all unsaved instances.
    display_options = JSONField(default=dict)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=ObjectStatus.VISIBLE,
        choices=ObjectStatus.as_choices(),
    )

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_widget'
        unique_together = (
            ('dashboard', 'order'),
            ('dashboard', 'title'),
        )

    __repr__ = sane_repr('dashboard', 'title')
class GroupInbox(Model):
    """
    A Group that is in the inbox.
    """

    __core__ = False

    # db_constraint=False on all three: avoids FK locks on hot tables.
    group = FlexibleForeignKey("sentry.Group", unique=True, db_constraint=False)
    project = FlexibleForeignKey("sentry.Project", null=True, db_constraint=False)
    organization = FlexibleForeignKey("sentry.Organization", null=True, db_constraint=False)
    # Why the group entered the inbox; defaults to "new".
    reason = models.PositiveSmallIntegerField(null=False, default=GroupInboxReason.NEW.value)
    # Optional structured details supporting `reason`.
    reason_details = JSONField(null=True)
    date_added = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_groupinbox"
        index_together = (("project", "date_added"), )
class ProjectDebugFile(Model):
    """A native debug information file (dSYM, ELF, PDB, ...) stored for a project."""

    __core__ = False

    file = FlexibleForeignKey("sentry.File")
    checksum = models.CharField(max_length=40, null=True, db_index=True)
    object_name = models.TextField()
    cpu_name = models.CharField(max_length=40)
    project = FlexibleForeignKey("sentry.Project", null=True)
    debug_id = models.CharField(max_length=64, db_column=u"uuid")
    code_id = models.CharField(max_length=64, null=True)
    data = JSONField(null=True)
    objects = ProjectDebugFileManager()

    class Meta:
        index_together = (("project", "debug_id"), ("project", "code_id"))
        db_table = "sentry_projectdsymfile"
        app_label = "sentry"

    __repr__ = sane_repr("object_name", "cpu_name", "debug_id")

    @property
    def file_format(self):
        """Map the stored Content-Type header onto a known DIF format name."""
        content_type = self.file.headers.get("Content-Type", "unknown").lower()
        return KNOWN_DIF_FORMATS.get(content_type, "unknown")

    @property
    def file_type(self):
        """The "type" value from the symbol metadata, or None when absent."""
        if self.data:
            return self.data.get("type")

    @property
    def file_extension(self):
        """Conventional filename extension for this file's format."""
        fmt = self.file_format
        # Formats whose extension does not depend on the file type.
        fixed = {
            "breakpad": ".sym",
            "proguard": ".txt",
            "pdb": ".pdb",
            "sourcebundle": ".src.zip",
            "wasm": ".wasm",
        }
        if fmt in fixed:
            return fixed[fmt]
        # For these formats the extension depends on whether it is an executable.
        if fmt == "macho":
            return "" if self.file_type == "exe" else ".dSYM"
        if fmt == "elf":
            return "" if self.file_type == "exe" else ".debug"
        if fmt == "pe":
            return ".exe" if self.file_type == "exe" else ".dll"
        return ""

    @property
    def features(self):
        """Feature flags recorded in the symbol metadata, as a frozenset."""
        return frozenset((self.data or {}).get("features", []))

    def delete(self, *args, **kwargs):
        # Remove the DB row first, then the backing file object.
        super().delete(*args, **kwargs)
        self.file.delete()
class OrganizationOnboardingTask(Model):
    """
    Onboarding tasks walk new Sentry orgs through basic features of Sentry.

    data field options (not all tasks have data fields):
        FIRST_EVENT: { 'platform': 'flask', }
        INVITE_MEMBER: { 'invited_member': user.id, 'teams': [team.id] }
        ISSUE_TRACKER | NOTIFICATION_SERVICE: { 'plugin': 'plugin_name' }
        ISSUE_ASSIGNMENT: { 'assigned_member': user.id }
        SECOND_PLATFORM: { 'platform': 'javascript' }
    """
    __core__ = False

    # Human-readable label for each onboarding task.
    TASK_CHOICES = (
        (OnboardingTask.FIRST_EVENT, 'First event'),  # Send an organization's first event to Sentry
        (OnboardingTask.INVITE_MEMBER, 'Invite member'),  # Add a second member to your Sentry org.
        (OnboardingTask.ISSUE_TRACKER, 'Issue tracker'),  # Hook up an external issue tracker.
        (OnboardingTask.NOTIFICATION_SERVICE, 'Notification services'),  # Setup a notification services
        (OnboardingTask.SECOND_PLATFORM, 'Second platform'),  # Send an event from a second platform
        (OnboardingTask.USER_CONTEXT, 'User context'),  # Add user context to errors
        (OnboardingTask.SOURCEMAPS, 'Upload sourcemaps'),  # Upload sourcemaps for compiled js code
        (OnboardingTask.RELEASE_TRACKING, 'Release tracking'),  # Add release data to Sentry events
        (OnboardingTask.USER_REPORTS, 'User reports'),  # Send user reports
    )

    STATUS_CHOICES = (
        (OnboardingTaskStatus.COMPLETE, 'Complete'),
        (OnboardingTaskStatus.PENDING, 'Pending'),
        (OnboardingTaskStatus.SKIPPED, 'Skipped'),
    )

    organization = FlexibleForeignKey('sentry.Organization')
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True)  # user that completed
    task = BoundedPositiveIntegerField(choices=TASK_CHOICES)
    status = BoundedPositiveIntegerField(choices=STATUS_CHOICES)
    date_completed = models.DateTimeField(default=timezone.now)
    project_id = BoundedBigIntegerField(blank=True, null=True)
    data = JSONField()  # INVITE_MEMBER { invited_member: user.id }

    objects = OrganizationOnboardingTaskManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organizationonboardingtask'
        unique_together = (('organization', 'task'), )

    __repr__ = sane_repr('organization', 'task')
class ExportedData(Model):
    """
    Stores references to asynchronous data export jobs being stored
    in the Google Cloud Platform temporary storage solution.
    """

    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.SET_NULL)
    file = FlexibleForeignKey(
        "sentry.File", null=True, db_constraint=False, on_delete=models.SET_NULL
    )
    date_added = models.DateTimeField(default=timezone.now)
    date_finished = models.DateTimeField(null=True)  # stamped by finalize_upload
    date_expired = models.DateTimeField(null=True, db_index=True)  # stamped by finalize_upload
    query_type = BoundedPositiveIntegerField(choices=ExportQueryType.as_choices())
    query_info = JSONField()

    @property
    def status(self):
        """Lifecycle state derived from the timestamps: Early while running,
        Expired once the stored file has lapsed, Valid otherwise."""
        if self.date_finished is None:
            return ExportStatus.Early
        # Bug fix: date_expired stays None until finalize_upload() runs;
        # comparing None against a datetime raises TypeError.
        elif self.date_expired is not None and self.date_expired < timezone.now():
            return ExportStatus.Expired
        else:
            return ExportStatus.Valid

    def delete_file(self):
        # Remove the backing file, if one was attached.
        if self.file:
            self.file.delete()

    def delete(self, *args, **kwargs):
        self.delete_file()
        super(ExportedData, self).delete(*args, **kwargs)

    def finalize_upload(self, file, expiration=DEFAULT_EXPIRATION):
        """Attach the finished export file and stamp finish/expiry times."""
        self.delete_file()  # drop any previously attached file first
        current_time = timezone.now()
        expire_time = current_time + expiration
        self.update(file=file, date_finished=current_time, date_expired=expire_time)
        # TODO(Leander): Implement email notification

    class Meta:
        app_label = "sentry"
        db_table = "sentry_exporteddata"

    __repr__ = sane_repr("query_type", "query_info")
class ProjectDebugFile(Model):
    """A native debug information file (dSYM, ELF, PDB, ...) stored for a project."""

    __core__ = False

    file = FlexibleForeignKey('sentry.File')
    object_name = models.TextField()
    cpu_name = models.CharField(max_length=40)
    project = FlexibleForeignKey('sentry.Project', null=True)
    debug_id = models.CharField(max_length=64, db_column='uuid')
    code_id = models.CharField(max_length=64, null=True)
    data = JSONField(null=True)
    objects = ProjectDebugFileManager()

    class Meta:
        index_together = (('project', 'debug_id'), ('project', 'code_id'))
        db_table = 'sentry_projectdsymfile'
        app_label = 'sentry'

    __repr__ = sane_repr('object_name', 'cpu_name', 'debug_id')

    @property
    def file_format(self):
        """Map the stored Content-Type header onto a known DIF format name."""
        content_type = self.file.headers.get('Content-Type', 'unknown').lower()
        return KNOWN_DIF_FORMATS.get(content_type, 'unknown')

    @property
    def file_type(self):
        """The 'type' value from the symbol metadata, or None when absent."""
        if self.data:
            return self.data.get('type')

    @property
    def file_extension(self):
        """Conventional filename extension for this file's format."""
        fmt = self.file_format
        # Formats whose extension does not depend on the file type.
        fixed = {'breakpad': '.sym', 'proguard': '.txt', 'pdb': '.pdb'}
        if fmt in fixed:
            return fixed[fmt]
        # For these formats the extension depends on whether it is an executable.
        if fmt == 'macho':
            return '' if self.file_type == 'exe' else '.dSYM'
        if fmt == 'elf':
            return '' if self.file_type == 'exe' else '.debug'
        if fmt == 'pe':
            return '.exe' if self.file_type == 'exe' else '.dll'
        return ''

    @property
    def features(self):
        """Feature flags recorded in the symbol metadata, as a frozenset."""
        return frozenset((self.data or {}).get('features', []))

    def delete(self, *args, **kwargs):
        # Remove the DB row first, then the backing file object.
        super().delete(*args, **kwargs)
        self.file.delete()
class ScheduledJob(Model):
    """A named job with a JSON payload, queued to run at `date_scheduled`."""

    __core__ = False

    name = models.CharField(max_length=128)
    payload = JSONField()  # presumably kwargs for the named job — confirm against the scheduler
    date_added = models.DateTimeField(default=timezone.now)
    date_scheduled = models.DateTimeField()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_scheduledjob'

    __repr__ = sane_repr('name', 'date_scheduled')
class ScheduledJob(Model):
    """A named job with a JSON payload, queued to run at `date_scheduled`."""

    __include_in_export__ = False

    name = models.CharField(max_length=128)
    payload = JSONField()  # presumably kwargs for the named job — confirm against the scheduler
    date_added = models.DateTimeField(default=timezone.now)
    date_scheduled = models.DateTimeField()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_scheduledjob"

    __repr__ = sane_repr("name", "date_scheduled")
class GroupLink(Model):
    """
    Link a group with an external resource like a commit, issue, or pull request
    """

    __include_in_export__ = False

    # How the linked resource relates to the group.
    class Relationship:
        unknown = 0
        resolves = 1
        references = 2

    # What kind of resource is linked.
    class LinkedType:
        unknown = 0
        commit = 1
        pull_request = 2
        issue = 3

    # db_constraint=False: avoids FK locks on the hot Group/Project tables.
    group = FlexibleForeignKey("sentry.Group", db_constraint=False)
    project = FlexibleForeignKey("sentry.Project", db_constraint=False, db_index=True)
    linked_type = BoundedPositiveIntegerField(
        default=LinkedType.commit,
        choices=(
            (LinkedType.commit, _("Commit")),
            (LinkedType.pull_request, _("Pull Request")),
            (LinkedType.issue, _("Tracker Issue")),
        ),
    )
    # Primary key of the linked resource (interpreted per linked_type).
    linked_id = BoundedBigIntegerField()
    relationship = BoundedPositiveIntegerField(
        default=Relationship.references,
        choices=((Relationship.resolves, _("Resolves")), (Relationship.references, _("Linked"))),
    )
    data = JSONField()
    datetime = models.DateTimeField(default=timezone.now, db_index=True)

    objects = GroupLinkManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_grouplink"
        unique_together = (("group", "linked_type", "linked_id"), )

    __repr__ = sane_repr("group_id", "linked_type", "linked_id", "relationship", "datetime")
class ExternalIssue(Model):
    """An issue living in an external tracker, referenced by raw ids (no FKs)."""

    __include_in_export__ = False

    organization_id = BoundedPositiveIntegerField()
    integration_id = BoundedPositiveIntegerField()
    key = models.CharField(max_length=128)  # example APP-123 in jira
    date_added = models.DateTimeField(default=timezone.now)
    title = models.TextField(null=True)
    description = models.TextField(null=True)
    metadata = JSONField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_externalissue"
        unique_together = (("organization_id", "integration_id", "key"),)

    __repr__ = sane_repr("organization_id", "integration_id", "key")
class ProjectCodeOwners(DefaultFieldsModel): __core__ = False # no db constraint to prevent locks on the Project table project = FlexibleForeignKey("sentry.Project", db_constraint=False) # repository_project_path_config ⇒ use this to transform CODEOWNERS paths to stacktrace paths repository_project_path_config = FlexibleForeignKey( "sentry.RepositoryProjectPathConfig", on_delete=models.PROTECT) # raw ⇒ original CODEOWNERS file. raw = models.TextField(null=True) # schema ⇒ transformed into IssueOwner syntax schema = JSONField(null=True) # override date_added from DefaultFieldsModel date_added = models.DateTimeField(default=timezone.now) class Meta: app_label = "sentry" db_table = "sentry_projectcodeowners"
class ScheduledDeletion(Model):
    """A deferred deletion of an arbitrary model instance, executed after a delay."""

    __core__ = False

    guid = models.CharField(max_length=32, unique=True, default=default_guid)
    # (app_label, model_name, object_id) identifies the instance to delete.
    app_label = models.CharField(max_length=64)
    model_name = models.CharField(max_length=64)
    object_id = BoundedBigIntegerField()
    date_added = models.DateTimeField(default=timezone.now)
    date_scheduled = models.DateTimeField(default=default_date_schedule)
    actor_id = BoundedBigIntegerField(null=True)
    # Bug fix: use the `dict` callable rather than a literal `{}` default —
    # a literal would be one shared mutable dict across all unsaved instances.
    data = JSONField(default=dict)
    in_progress = models.BooleanField(default=False)
    aborted = models.BooleanField(default=False)

    class Meta:
        unique_together = (("app_label", "model_name", "object_id"), )
        app_label = "sentry"
        db_table = "sentry_scheduleddeletion"

    @classmethod
    def schedule(cls, instance, days=30, data=None, actor=None):
        """Create a deletion entry for `instance`, due `days` from now."""
        return cls.objects.create(
            app_label=instance._meta.app_label,
            model_name=type(instance).__name__,
            object_id=instance.pk,
            date_scheduled=timezone.now() + timedelta(days=days),
            data=data or {},
            actor_id=actor.id if actor else None,
        )

    def get_model(self):
        """Resolve the stored (app_label, model_name) pair to a model class."""
        return apps.get_model(self.app_label, self.model_name)

    def get_instance(self):
        """Fetch the concrete object this deletion targets."""
        return self.get_model().objects.get(pk=self.object_id)

    def get_actor(self):
        """Return the User that requested the deletion, or None if unknown/gone."""
        from sentry.models import User

        if not self.actor_id:
            return None

        try:
            return User.objects.get(id=self.actor_id)
        except User.DoesNotExist:
            return None
class GroupLink(Model):
    """
    Link a group with an external resource like a commit, issue, or pull request
    """

    __core__ = False

    # How the linked resource relates to the group.
    class Relationship:
        unknown = 0
        resolves = 1
        references = 2

    # What kind of resource is linked.
    class LinkedType:
        unknown = 0
        commit = 1
        pull_request = 2
        issue = 3

    # Raw ids rather than FKs (no db-level constraints on Group/Project).
    group_id = BoundedBigIntegerField()
    project_id = BoundedBigIntegerField(db_index=True)
    linked_type = BoundedPositiveIntegerField(
        default=LinkedType.commit,
        choices=(
            (LinkedType.commit, _('Commit')),
            (LinkedType.pull_request, _('Pull Request')),
            (LinkedType.issue, _('Tracker Issue')),
        ),
    )
    # Primary key of the linked resource (interpreted per linked_type).
    linked_id = BoundedBigIntegerField()
    relationship = BoundedPositiveIntegerField(
        default=Relationship.references,
        choices=(
            (Relationship.resolves, _('Resolves')),
            (Relationship.references, _('Linked')),
        ),
    )
    data = JSONField()
    datetime = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_grouplink'
        unique_together = (('group_id', 'linked_type', 'linked_id'), )

    __repr__ = sane_repr('group_id', 'linked_type', 'linked_id', 'relationship', 'datetime')
class ProjectCodeOwners(DefaultFieldsModel):
    """Per-project CODEOWNERS data plus its parsed IssueOwner schema."""

    __core__ = False

    # no db constraint to prevent locks on the Project table
    project = FlexibleForeignKey("sentry.Project", db_constraint=False)
    # repository_project_path_config => use this to transform CODEOWNERS paths to stacktrace paths
    repository_project_path_config = FlexibleForeignKey(
        "sentry.RepositoryProjectPathConfig", unique=True, on_delete=models.PROTECT
    )
    # raw => original CODEOWNERS file.
    raw = models.TextField(null=True)
    # schema => transformed into IssueOwner syntax
    schema = JSONField(null=True)
    # override date_added from DefaultFieldsModel
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_projectcodeowners"

    __repr__ = sane_repr("project_id", "id")

    # Idiom fix: these are classmethods, so the first parameter is the class —
    # name it `cls`, not `self`.
    @classmethod
    def get_cache_key(cls, project_id):
        # NOTE: the historical typo in the key ("codewoners") is preserved on
        # purpose; changing it would orphan every existing cache entry.
        return f"projectcodewoners_project_id:1:{project_id}"

    @classmethod
    def get_codeowners_cached(cls, project_id):
        """
        Cached read access to sentry_projectcodeowners.

        This method implements a negative cache which saves us
        a pile of read queries in post_processing as most projects
        don't have CODEOWNERS.
        """
        cache_key = cls.get_cache_key(project_id)
        codeowners = cache.get(cache_key)
        if codeowners is None:
            try:
                codeowners = cls.objects.get(project_id=project_id)
            except cls.DoesNotExist:
                # Cache False (not None) so "no CODEOWNERS" is distinguishable
                # from "not cached yet".
                codeowners = False
            cache.set(cache_key, codeowners, READ_CACHE_DURATION)
        return codeowners or None
class FeatureAdoption(Model):
    """Tracks which Sentry features an organization has adopted."""

    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    # Modernization: six.text_type is simply `str` on Python 3, and this file
    # already relies on Python-3-only syntax elsewhere.
    feature_id = models.PositiveIntegerField(
        choices=[(f.id, str(f.name)) for f in manager.all()]
    )
    date_completed = models.DateTimeField(default=timezone.now)
    complete = models.BooleanField(default=False)
    applicable = models.BooleanField(default=True)  # Is this feature applicable to this team?
    data = JSONField()

    objects = FeatureAdoptionManager()

    __repr__ = sane_repr("organization_id", "feature_id", "complete", "applicable")

    class Meta:
        app_label = "sentry"
        db_table = "sentry_featureadoption"
        unique_together = (("organization", "feature_id"),)
class FeatureAdoption(Model):
    """Tracks which Sentry features an organization has adopted."""

    __core__ = False

    organization = FlexibleForeignKey('sentry.Organization')
    # Choices are built from the feature manager's registry at import time.
    feature_id = models.PositiveIntegerField(choices=[(f.id, f.name) for f in manager.all()])
    date_completed = models.DateTimeField(default=timezone.now)
    complete = models.BooleanField(default=False)
    applicable = models.BooleanField(
        default=True)  # Is this feature applicable to this team?
    data = JSONField()

    objects = FeatureAdoptionManager()

    __repr__ = sane_repr('organization_id', 'feature_id', 'complete', 'applicable')

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_featureadoption'
        unique_together = (('organization', 'feature_id'), )
class PromptsActivity(Model):
    """ Records user interaction with various feature prompts in product"""

    __core__ = False

    organization_id = BoundedPositiveIntegerField(db_index=True)
    # Not a Foreign Key because it's no longer safe to take out lock on Project table in Prod
    project_id = BoundedPositiveIntegerField(db_index=True)
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=False)
    feature = models.CharField(max_length=64, null=False)
    # typically will include a dismissed/snoozed timestamp or something similar
    # Bug fix: use the `dict` callable rather than a literal `{}` default —
    # a literal would be one shared mutable dict across all unsaved instances.
    data = JSONField(default=dict)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_promptsactivity"
        unique_together = (("user", "feature", "organization_id", "project_id"), )

    __repr__ = sane_repr("user_id", "feature")
class WidgetDataSource(Model):
    """
    Deprecated widget class. Will be removed very soon.
    """

    __core__ = True

    widget = FlexibleForeignKey("sentry.Widget", db_constraint=False, db_index=False)
    name = models.CharField(max_length=255)
    type = BoundedPositiveIntegerField(choices=[(0, "discover_saved_search")])
    # Bug fix: use the `dict` callable rather than a literal `{}` default —
    # a literal would be one shared mutable dict across all unsaved instances.
    data = JSONField(default=dict)  # i.e. saved discover query
    order = BoundedPositiveIntegerField()
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=ObjectStatus.VISIBLE, choices=ObjectStatus.as_choices()
    )

    class Meta:
        app_label = "sentry"
        db_table = "sentry_widgetdatasource"
        unique_together = (("widget", "name"), ("widget", "order"))

    __repr__ = sane_repr("widget", "type", "name")
class WidgetDataSource(Model):
    """
    A dashboard widget.
    """

    __core__ = True

    widget = FlexibleForeignKey("sentry.Widget")
    type = BoundedPositiveIntegerField(choices=WidgetDataSourceTypes.as_choices())
    name = models.CharField(max_length=255)
    # Bug fix: use the `dict` callable rather than a literal `{}` default —
    # a literal would be one shared mutable dict across all unsaved instances.
    data = JSONField(default=dict)  # i.e. saved discover query
    order = BoundedPositiveIntegerField()
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=ObjectStatus.VISIBLE, choices=ObjectStatus.as_choices()
    )

    class Meta:
        app_label = "sentry"
        db_table = "sentry_widgetdatasource"
        unique_together = (("widget", "name"), ("widget", "order"))

    __repr__ = sane_repr("widget", "type", "name")
class Widget(Model):
    """
    Deprecated widget class. Will be removed very soon.
    """

    __core__ = True

    dashboard = FlexibleForeignKey("sentry.Dashboard", db_constraint=False, db_index=False)
    order = BoundedPositiveIntegerField()
    title = models.CharField(max_length=255)
    display_type = BoundedPositiveIntegerField(choices=DashboardWidgetDisplayTypes.as_choices())
    # Bug fix: use the `dict` callable rather than a literal `{}` default —
    # a literal would be one shared mutable dict across all unsaved instances.
    display_options = JSONField(default=dict)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=ObjectStatus.VISIBLE, choices=ObjectStatus.as_choices()
    )

    class Meta:
        app_label = "sentry"
        db_table = "sentry_widget"
        unique_together = (("dashboard", "order"), ("dashboard", "title"))

    __repr__ = sane_repr("dashboard", "title")
class File(Model):
    """A logical file assembled from one or more deduplicated FileBlob chunks."""

    __core__ = False

    name = models.TextField()
    type = models.CharField(max_length=64)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    headers = JSONField()
    # Chunked storage: ordered FileBlobIndex rows map blobs to byte offsets.
    blobs = models.ManyToManyField("sentry.FileBlob", through="sentry.FileBlobIndex")
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, null=True, db_index=True)

    # <Legacy fields>
    # Remove in 8.1
    blob = FlexibleForeignKey("sentry.FileBlob", null=True, related_name="legacy_blob")
    path = models.TextField(null=True)
    # </Legacy fields>

    class Meta:
        app_label = "sentry"
        db_table = "sentry_file"

    def _get_chunked_blob(self, mode=None, prefetch=False, prefetch_to=None, delete=True):
        # Wrap this file's blob indexes (ordered by byte offset) in a file-like object.
        return ChunkedFileBlobIndexWrapper(
            FileBlobIndex.objects.filter(file=self).select_related("blob").order_by("offset"),
            mode=mode,
            prefetch=prefetch,
            prefetch_to=prefetch_to,
            delete=delete,
        )

    def getfile(self, mode=None, prefetch=False):
        """Returns a file object.

        By default the file is fetched on demand but if prefetch is enabled
        the file is fully prefetched into a tempfile before reading can happen.
        """
        impl = self._get_chunked_blob(mode, prefetch)
        return FileObj(impl, self.name)

    def save_to(self, path):
        """Fetches the file and emplaces it at a certain location.

        The write is done atomically to a tempfile first and then moved over.
        If the directory does not exist it is created.
        """
        path = os.path.abspath(path)
        base = os.path.dirname(path)
        try:
            os.makedirs(base)
        except OSError:
            # Directory already exists (or cannot be created; rename will fail later).
            pass

        f = None
        try:
            f = self._get_chunked_blob(
                prefetch=True, prefetch_to=base, delete=False
            ).detach_tempfile()

            # pre-emptively check if the file already exists.
            # this can happen as a race condition if two processes/threads
            # are trying to cache the same file and both try to write
            # at the same time, overwriting each other. Normally this is fine,
            # but can cause an issue if another process has opened the file
            # for reading, then the file that was being read gets clobbered.
            # I don't know if this affects normal filesystems, but it
            # definitely has an issue if the filesystem is NFS.
            if not os.path.exists(path):
                os.rename(f.name, path)
                f.close()
                f = None
        finally:
            # If the rename never happened, clean up the tempfile.
            if f is not None:
                f.close()
                try:
                    os.remove(f.name)
                except Exception:
                    pass

    def putfile(self, fileobj, blob_size=DEFAULT_BLOB_SIZE, commit=True, logger=nooplogger):
        """
        Save a fileobj into a number of chunks.

        Returns a list of `FileBlobIndex` items.

        >>> indexes = file.putfile(fileobj)
        """
        results = []
        offset = 0
        checksum = sha1(b"")

        # Read the source in blob_size chunks, deduplicating each into a FileBlob.
        while True:
            contents = fileobj.read(blob_size)
            if not contents:
                break
            checksum.update(contents)

            blob_fileobj = ContentFile(contents)
            blob = FileBlob.from_file(blob_fileobj, logger=logger)

            results.append(FileBlobIndex.objects.create(file=self, blob=blob, offset=offset))
            offset += blob.size
        self.size = offset
        self.checksum = checksum.hexdigest()
        metrics.timing("filestore.file-size", offset)
        if commit:
            self.save()
        return results

    def assemble_from_file_blob_ids(self, file_blob_ids, checksum, commit=True):
        """
        This creates a file, from file blobs and returns a temp file with the
        contents.
        """
        tf = tempfile.NamedTemporaryFile()
        with transaction.atomic():
            file_blobs = FileBlob.objects.filter(id__in=file_blob_ids).all()

            # Ensure blobs are in the order and duplication as provided
            blobs_by_id = {blob.id: blob for blob in file_blobs}
            file_blobs = [blobs_by_id[blob_id] for blob_id in file_blob_ids]

            new_checksum = sha1(b"")
            offset = 0
            for blob in file_blobs:
                FileBlobIndex.objects.create(file=self, blob=blob, offset=offset)
                for chunk in blob.getfile().chunks():
                    new_checksum.update(chunk)
                    tf.write(chunk)
                offset += blob.size

            self.size = offset
            self.checksum = new_checksum.hexdigest()

            # Reject the assembly if the caller's checksum does not match.
            if checksum != self.checksum:
                raise AssembleChecksumMismatch("Checksum mismatch")

        metrics.timing("filestore.file-size", offset)
        if commit:
            self.save()
        tf.flush()
        tf.seek(0)
        return tf

    def delete(self, *args, **kwargs):
        blob_ids = [blob.id for blob in self.blobs.all()]
        super().delete(*args, **kwargs)

        # Wait to delete blobs. This helps prevent
        # races around frequently used blobs in debug images and release files.
        transaction.on_commit(
            lambda: delete_unreferenced_blobs.apply_async(
                kwargs={"blob_ids": blob_ids}, countdown=60 * 5
            )
        )
class ProjectKey(Model):
    """A client key (DSN) granting a project access to the event-ingest APIs."""

    __core__ = True

    project = FlexibleForeignKey("sentry.Project", related_name="key_set")
    label = models.CharField(max_length=64, blank=True, null=True)
    public_key = models.CharField(max_length=32, unique=True, null=True)
    secret_key = models.CharField(max_length=32, unique=True, null=True)
    roles = BitField(
        flags=(
            # access to post events to the store endpoint
            ("store", "Event API access"),
            # read/write access to rest API
            ("api", "Web API access"),
        ),
        default=["store"],
    )
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ProjectKeyStatus.ACTIVE, _("Active")),
            (ProjectKeyStatus.INACTIVE, _("Inactive")),
        ),
        db_index=True,
    )
    date_added = models.DateTimeField(default=timezone.now, null=True)

    # Optional per-key rate limit: `rate_limit_count` events per
    # `rate_limit_window` seconds — TODO confirm the window unit.
    rate_limit_count = BoundedPositiveIntegerField(null=True)
    rate_limit_window = BoundedPositiveIntegerField(null=True)

    objects = BaseManager(
        cache_fields=("public_key", "secret_key"),
        # store projectkeys in memcached for longer than other models,
        # specifically to make the relay_projectconfig endpoint faster.
        cache_ttl=60 * 30,
    )

    data = JSONField()

    # support legacy project keys in API
    scopes = (
        "project:read",
        "project:write",
        "project:admin",
        "project:releases",
        "event:read",
        "event:write",
        "event:admin",
    )

    class Meta:
        app_label = "sentry"
        db_table = "sentry_projectkey"

    __repr__ = sane_repr("project_id", "public_key")

    def __unicode__(self):
        return six.text_type(self.public_key)

    @classmethod
    def generate_api_key(cls):
        # Random 32-char hex token used for both the public and secret key.
        return uuid4().hex

    @classmethod
    def looks_like_api_key(cls, key):
        # True when `key` matches the uuid4-hex shape produced above.
        return bool(_uuid4_re.match(key))

    @classmethod
    def from_dsn(cls, dsn):
        """Resolve a DSN string back to its ProjectKey row.

        Raises ProjectKey.DoesNotExist when no matching key exists.
        """
        urlparts = urlparse(dsn)

        public_key = urlparts.username
        project_id = urlparts.path.rsplit("/", 1)[-1]

        try:
            return ProjectKey.objects.get(public_key=public_key, project=project_id)
        except ValueError:
            # ValueError would come from a non-integer project_id,
            # which is obviously a DoesNotExist. We catch and rethrow this
            # so anything downstream expecting DoesNotExist works fine
            raise ProjectKey.DoesNotExist("ProjectKey matching query does not exist.")

    @classmethod
    def get_default(cls, project):
        # First active key on the project that still carries the "store" role.
        return cls.objects.filter(
            project=project,
            roles=models.F("roles").bitor(cls.roles.store),
            status=ProjectKeyStatus.ACTIVE,
        ).first()

    @property
    def is_active(self):
        return self.status == ProjectKeyStatus.ACTIVE

    @property
    def rate_limit(self):
        # (count, window) when rate limiting is configured, otherwise (0, 0).
        if self.rate_limit_count and self.rate_limit_window:
            return (self.rate_limit_count, self.rate_limit_window)
        return (0, 0)

    def save(self, *args, **kwargs):
        # Auto-generate keys and a friendly two-word label on first save.
        if not self.public_key:
            self.public_key = ProjectKey.generate_api_key()
        if not self.secret_key:
            self.secret_key = ProjectKey.generate_api_key()
        if not self.label:
            self.label = petname.Generate(2, " ", letters=10).title()
        super(ProjectKey, self).save(*args, **kwargs)

    def get_dsn(self, domain=None, secure=True, public=False):
        """Build the DSN URL for this key; empty string when no endpoint is configured."""
        urlparts = urlparse(self.get_endpoint(public=public))

        if not public:
            key = "%s:%s" % (self.public_key, self.secret_key)
        else:
            key = self.public_key

        # If we do not have a scheme or domain/hostname, dsn is never valid
        if not urlparts.netloc or not urlparts.scheme:
            return ""

        return "%s://%s@%s/%s" % (
            urlparts.scheme,
            key,
            urlparts.netloc + urlparts.path,
            self.project_id,
        )

    @property
    def organization_id(self):
        return self.project.organization_id

    @property
    def organization(self):
        return self.project.organization

    @property
    def dsn_private(self):
        return self.get_dsn(public=False)

    @property
    def dsn_public(self):
        return self.get_dsn(public=True)

    @property
    def csp_endpoint(self):
        # URL browsers should POST CSP violation reports to.
        endpoint = self.get_endpoint()
        return "%s%s?sentry_key=%s" % (
            endpoint,
            reverse("sentry-api-csp-report", args=[self.project_id]),
            self.public_key,
        )

    @property
    def security_endpoint(self):
        endpoint = self.get_endpoint()
        return "%s%s?sentry_key=%s" % (
            endpoint,
            reverse("sentry-api-security-report", args=[self.project_id]),
            self.public_key,
        )

    @property
    def minidump_endpoint(self):
        endpoint = self.get_endpoint()
        return "%s%s/?sentry_key=%s" % (
            endpoint,
            reverse("sentry-api-minidump", args=[self.project_id]),
            self.public_key,
        )

    @property
    def unreal_endpoint(self):
        return self.get_endpoint() + reverse(
            "sentry-api-unreal", args=[self.project_id, self.public_key]
        )

    @property
    def js_sdk_loader_cdn_url(self):
        # If a CDN is configured, serve the loader from there; otherwise fall
        # back to the locally routed loader endpoint.
        if settings.JS_SDK_LOADER_CDN_URL:
            return "%s%s.min.js" % (settings.JS_SDK_LOADER_CDN_URL, self.public_key)
        else:
            endpoint = self.get_endpoint()
            return "%s%s" % (
                endpoint,
                reverse("sentry-js-sdk-loader", args=[self.public_key, ".min"]),
            )

    def get_endpoint(self, public=True):
        """Return the base URL clients should send events to for this key."""
        if public:
            endpoint = settings.SENTRY_PUBLIC_ENDPOINT or settings.SENTRY_ENDPOINT
        else:
            endpoint = settings.SENTRY_ENDPOINT

        if not endpoint:
            endpoint = options.get("system.url-prefix")

        # Optionally rewrite the host to an org-scoped subdomain.
        if features.has("organizations:org-subdomains", self.project.organization):
            urlparts = urlparse(endpoint)
            if urlparts.scheme and urlparts.netloc:
                endpoint = "%s://%s.%s%s" % (
                    urlparts.scheme,
                    settings.SENTRY_ORG_SUBDOMAIN_TEMPLATE.format(
                        organization_id=self.project.organization_id
                    ),
                    urlparts.netloc,
                    urlparts.path,
                )

        return endpoint

    def get_allowed_origins(self):
        from sentry.utils.http import get_origins

        return get_origins(self.project)

    def get_audit_log_data(self):
        # Snapshot of the key's settings for audit-log entries.
        return {
            "label": self.label,
            "public_key": self.public_key,
            "secret_key": self.secret_key,
            "roles": int(self.roles),
            "status": self.status,
            "rate_limit_count": self.rate_limit_count,
            "rate_limit_window": self.rate_limit_window,
        }

    def get_scopes(self):
        return self.scopes
class Repository(Model, PendingDeletionMixin):
    """A source-code repository linked to an organization.

    May be backed either by a legacy plugin provider or by a first-class
    integration (``provider`` prefixed with ``integrations:``).  Supports the
    pending-deletion workflow from ``PendingDeletionMixin``: identifying
    fields are renamed while the deletion is queued so a replacement with the
    same name can be created immediately.
    """
    __include_in_export__ = True

    organization_id = BoundedPositiveIntegerField(db_index=True)
    name = models.CharField(max_length=200)
    url = models.URLField(null=True)
    provider = models.CharField(max_length=64, null=True)
    external_id = models.CharField(max_length=64, null=True)
    config = JSONField(default=dict)
    status = BoundedPositiveIntegerField(default=ObjectStatus.VISIBLE,
                                         choices=ObjectStatus.as_choices(),
                                         db_index=True)
    date_added = models.DateTimeField(default=timezone.now)
    integration_id = BoundedPositiveIntegerField(db_index=True, null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_repository"
        unique_together = (
            ("organization_id", "name"),
            ("organization_id", "provider", "external_id"),
        )

    __repr__ = sane_repr("organization_id", "name", "provider")

    # Fields renamed (mangled) while the repository awaits deletion so the
    # unique_together constraints don't block re-creation.
    _rename_fields_on_pending_delete = frozenset(["name", "external_id"])

    def has_integration_provider(self):
        """Return True when backed by a first-class integration provider."""
        return self.provider and self.provider.startswith("integrations:")

    def get_provider(self):
        """Instantiate the provider class bound for this repository."""
        from sentry.plugins.base import bindings

        if self.has_integration_provider():
            provider_cls = bindings.get("integration-repository.provider").get(
                self.provider)
            return provider_cls(self.provider)

        provider_cls = bindings.get("repository.provider").get(self.provider)
        return provider_cls(self.provider)

    def generate_delete_fail_email(self, error_message):
        """Build the notification email sent when webhook removal fails.

        :param error_message: human-readable reason shown in the email body.
        :returns: an unsent ``MessageBuilder``.
        """
        from sentry.utils.email import MessageBuilder

        new_context = {
            "repo": self,
            "error_message": error_message,
            "provider_name": self.get_provider().name,
        }

        return MessageBuilder(
            subject="Unable to Delete Repository Webhooks",
            context=new_context,
            template="sentry/emails/unable-to-delete-repo.txt",
            html_template="sentry/emails/unable-to-delete-repo.html",
        )

    def rename_on_pending_deletion(
        self,
        fields: set[str] | None = None,
        extra_fields_to_save: list[str] | None = None,
    ) -> None:
        # Due to the fact that Repository is shown to the user
        # as it is pending deletion, this is added to display the fields
        # correctly to the user.
        self.config["pending_deletion_name"] = self.name
        super().rename_on_pending_deletion(fields, ["config"])

    def reset_pending_deletion_field_names(
        self,
        extra_fields_to_save: list[str] | None = None,
    ) -> bool:
        # FIX: use pop() with a default instead of `del` — `del` raised
        # KeyError when reset was called without a prior
        # rename_on_pending_deletion (or called twice), since the key is only
        # written by that method.
        self.config.pop("pending_deletion_name", None)
        return super().reset_pending_deletion_field_names(["config"])
class OrganizationOnboardingTask(Model):
    """
    Onboarding tasks walk new Sentry orgs through basic features of Sentry.
    """
    __core__ = False

    # (task id, string key) pairs; the string keys are the stable identifiers
    # exposed through the API.
    TASK_CHOICES = (
        (OnboardingTask.FIRST_PROJECT, "create_project"),
        (OnboardingTask.FIRST_EVENT, "send_first_event"),
        (OnboardingTask.INVITE_MEMBER, "invite_member"),
        (OnboardingTask.SECOND_PLATFORM, "setup_second_platform"),
        (OnboardingTask.USER_CONTEXT, "setup_user_context"),
        (OnboardingTask.RELEASE_TRACKING, "setup_release_tracking"),
        (OnboardingTask.SOURCEMAPS, "setup_sourcemaps"),
        (OnboardingTask.USER_REPORTS, "setup_user_reports"),
        (OnboardingTask.ISSUE_TRACKER, "setup_issue_tracker"),
        (OnboardingTask.ALERT_RULE, "setup_alert_rules"),
        (OnboardingTask.FIRST_TRANSACTION, "setup_transactions"),
    )

    STATUS_CHOICES = (
        (OnboardingTaskStatus.COMPLETE, "complete"),
        (OnboardingTaskStatus.PENDING, "pending"),
        (OnboardingTaskStatus.SKIPPED, "skipped"),
    )

    # Used in the API to map IDs to string keys. This keeps things
    # a bit more maintainable on the frontend.
    TASK_KEY_MAP = dict(TASK_CHOICES)
    TASK_LOOKUP_BY_KEY = {v: k for k, v in TASK_CHOICES}
    STATUS_KEY_MAP = dict(STATUS_CHOICES)
    STATUS_LOOKUP_BY_KEY = {v: k for k, v in STATUS_CHOICES}

    # Tasks which must be completed for the onboarding to be considered
    # complete.
    REQUIRED_ONBOARDING_TASKS = frozenset([
        OnboardingTask.FIRST_PROJECT,
        OnboardingTask.FIRST_EVENT,
        OnboardingTask.INVITE_MEMBER,
        OnboardingTask.SECOND_PLATFORM,
        OnboardingTask.USER_CONTEXT,
        OnboardingTask.RELEASE_TRACKING,
        OnboardingTask.SOURCEMAPS,
        OnboardingTask.ISSUE_TRACKER,
        OnboardingTask.ALERT_RULE,
        OnboardingTask.FIRST_TRANSACTION,
    ])

    # Tasks a user may explicitly skip (status SKIPPED) instead of completing.
    SKIPPABLE_TASKS = frozenset([
        OnboardingTask.INVITE_MEMBER,
        OnboardingTask.SECOND_PLATFORM,
        OnboardingTask.USER_CONTEXT,
        OnboardingTask.RELEASE_TRACKING,
        OnboardingTask.SOURCEMAPS,
        OnboardingTask.USER_REPORTS,
        OnboardingTask.ISSUE_TRACKER,
        OnboardingTask.ALERT_RULE,
        OnboardingTask.FIRST_TRANSACTION,
    ])

    organization = FlexibleForeignKey("sentry.Organization")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True,
                              on_delete=models.SET_NULL)  # user that completed
    task = BoundedPositiveIntegerField(choices=[(k, str(v)) for k, v in TASK_CHOICES])
    status = BoundedPositiveIntegerField(choices=[(k, str(v)) for k, v in STATUS_CHOICES])
    # When the user last saw this task marked complete in the UI (null until seen).
    completion_seen = models.DateTimeField(null=True)
    date_completed = models.DateTimeField(default=timezone.now)
    project = FlexibleForeignKey("sentry.Project", db_constraint=False, null=True)
    data = JSONField()  # INVITE_MEMBER { invited_member: user.id }

    objects = OrganizationOnboardingTaskManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_organizationonboardingtask"
        # One row per (organization, task) pair.
        unique_together = (("organization", "task"), )

    __repr__ = sane_repr("organization", "task")
class ExportedData(Model):
    """
    Stores references to asynchronous data export jobs
    """
    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True,
                              on_delete=models.SET_NULL)
    file = FlexibleForeignKey("sentry.File", null=True, db_constraint=False,
                              on_delete=models.SET_NULL)
    date_added = models.DateTimeField(default=timezone.now)
    # Set when the export job completes; null while still running.
    date_finished = models.DateTimeField(null=True)
    # When the generated file stops being downloadable; null until finalized.
    date_expired = models.DateTimeField(null=True, db_index=True)
    query_type = BoundedPositiveIntegerField(choices=ExportQueryType.as_choices())
    query_info = JSONField()

    @property
    def status(self):
        """Lifecycle state of the export: Early, Expired, or Valid."""
        if self.date_finished is None:
            return ExportStatus.Early
        # FIX: guard against date_expired being None — the field is null=True
        # (email_success explicitly handles that state), and comparing
        # None < datetime raised TypeError.  A finished export with no expiry
        # recorded is treated as still valid.
        elif self.date_expired is not None and self.date_expired < timezone.now():
            return ExportStatus.Expired
        else:
            return ExportStatus.Valid

    @property
    def payload(self):
        """query_info plus the export type as a string, for serialization."""
        payload = self.query_info.copy()
        payload["export_type"] = ExportQueryType.as_str(self.query_type)
        return payload

    @property
    def file_name(self):
        date = self.date_added.strftime("%Y-%B-%d")
        export_type = ExportQueryType.as_str(self.query_type)
        # Example: Discover_2020-July-21_27.csv
        return "{}_{}_{}.csv".format(export_type, date, self.id)

    @staticmethod
    def format_date(date):
        """Format a datetime for emails; returns None when date is None."""
        # Example: 12:21 PM on July 21, 2020 (UTC)
        return None if date is None else date.strftime("%-I:%M %p on %B %d, %Y (%Z)")

    def delete_file(self):
        if self.file:
            self.file.delete()

    def delete(self, *args, **kwargs):
        # Remove the stored blob before deleting the row.
        self.delete_file()
        super(ExportedData, self).delete(*args, **kwargs)

    def finalize_upload(self, file, expiration=DEFAULT_EXPIRATION):
        """Attach the finished file, stamp finish/expiry times, notify user."""
        self.delete_file()  # If a file is present, remove it
        current_time = timezone.now()
        expire_time = current_time + expiration
        self.update(file=file, date_finished=current_time, date_expired=expire_time)
        self.email_success()

    def email_success(self):
        from sentry.utils.email import MessageBuilder

        # The following condition should never be true, but it's a safeguard in case someone manually calls this method
        if self.date_finished is None or self.date_expired is None or self.file is None:
            logger.warning(
                "Notification email attempted on incomplete dataset",
                extra={
                    "data_export_id": self.id,
                    "organization_id": self.organization_id
                },
            )
            return
        url = absolute_uri(
            reverse("sentry-data-export-details", args=[self.organization.slug, self.id]))
        msg = MessageBuilder(
            subject="Your data is ready.",
            context={
                "url": url,
                "expiration": self.format_date(self.date_expired)
            },
            type="organization.export-data",
            template="sentry/emails/data-export-success.txt",
            html_template="sentry/emails/data-export-success.html",
        )
        # NOTE(review): self.user is nullable (SET_NULL) — this would raise if
        # the user was deleted before the export finished; confirm upstream
        # guarantees a live user here.
        msg.send_async([self.user.email])
        metrics.incr("dataexport.end", tags={"success": True}, sample_rate=1.0)

    def email_failure(self, message):
        """Notify the user of a failed export, then delete this record."""
        from sentry.utils.email import MessageBuilder

        msg = MessageBuilder(
            subject="We couldn't export your data.",
            context={
                "creation": self.format_date(self.date_added),
                "error_message": message,
                "payload": json.dumps(self.payload, indent=2, sort_keys=True),
            },
            type="organization.export-data",
            template="sentry/emails/data-export-failure.txt",
            html_template="sentry/emails/data-export-failure.html",
        )
        msg.send_async([self.user.email])
        metrics.incr("dataexport.end", tags={"success": False}, sample_rate=1.0)
        self.delete()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_exporteddata"

    __repr__ = sane_repr("query_type", "query_info")
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.
    """
    __core__ = False

    organization = FlexibleForeignKey('sentry.Organization')
    projects = models.ManyToManyField('sentry.Project',
                                      related_name='releases',
                                      through=ReleaseProject)
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    data = JSONField(default={})
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey('sentry.User', null=True, blank=True,
                               on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True, default=0)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True, default=0)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_release'
        unique_together = (('organization', 'version'), )

    __repr__ = sane_repr('organization_id', 'version')

    @staticmethod
    def is_valid_version(value):
        # Rejects empty values, path-like values ('.', '..'), the reserved
        # word "latest", and any value containing a forbidden character.
        return not (any(c in value for c in BAD_RELEASE_CHARS)
                    or value in ('.', '..') or not value
                    or value.lower() == 'latest')

    @classmethod
    def get_cache_key(cls, organization_id, version):
        return 'release:3:%s:%s' % (organization_id, md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        return u'releasecommits:{}:{}'.format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """Fetch a release by project/version through a 5-minute cache.

        A cached sentinel of -1 records a negative lookup; returns None in
        that case.
        """
        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id,
                    projects=project,
                    version=version,
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)

        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """Fetch or create the release for (project, version).

        Also considers the legacy "<project-slug>-<version>" form, links the
        project to the release, and flips the project's has_releases flag on
        first release.
        """
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ('%s-%s' % (project.slug, version))[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(organization_id=project.organization_id,
                                   version__in=[version, project_version],
                                   projects=project))
            if releases:
                # Prefer the project-scoped legacy version when both exist.
                try:
                    release = [
                        r for r in releases if r.version == project_version
                    ][0]
                except IndexError:
                    release = releases[0]
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )
                except IntegrityError:
                    # Lost a create race; someone else inserted it first.
                    release = cls.objects.get(
                        organization_id=project.organization_id, version=version)
                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F('flags').bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)

        return release

    @classmethod
    def merge(cls, to_release, from_releases):
        """Fold each release in from_releases into to_release, then delete it.

        Rows in referencing models are repointed in bulk; on unique-constraint
        collision each row is retried individually and dropped if it still
        collides (a matching row already exists on to_release).
        """
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (ReleaseCommit, ReleaseEnvironment, ReleaseFile,
                                   ReleaseProject, ReleaseProjectEnvironment, Group,
                                   GroupRelease, GroupResolution)

        model_list = (ReleaseCommit, ReleaseEnvironment, ReleaseFile,
                      ReleaseProject, ReleaseProjectEnvironment, GroupRelease,
                      GroupResolution)
        for release in from_releases:
            for model in model_list:
                # Some models point at the release via FK, others via raw id.
                if hasattr(model, 'release'):
                    update_kwargs = {'release': to_release}
                else:
                    update_kwargs = {'release_id': to_release.id}
                try:
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(
                            **update_kwargs)
                except IntegrityError:
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(
                                    **update_kwargs)
                        except IntegrityError:
                            item.delete()
            Group.objects.filter(first_release=release).update(
                first_release=to_release)
            release.delete()

    @property
    def short_version(self):
        return Release.get_display_version(self.version)

    @staticmethod
    def get_display_version(version):
        # Strip a dotted package-path prefix, then abbreviate bare SHA1s to
        # their first 7 characters.
        match = _dotted_path_prefix_re.match(version)
        if match is not None:
            version = version[match.end():]
        if _sha1_re.match(version):
            return version[:7]
        return version

    def add_dist(self, name, date_added=None):
        """Get or create the named Distribution for this release."""
        from sentry.models import Distribution
        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(release=self,
                                                  name=name,
                                                  defaults={
                                                      'date_added': date_added,
                                                      'organization_id': self.organization_id,
                                                  })[0]

    def get_dist(self, name):
        # Returns None when no such distribution exists.
        from sentry.models import Distribution
        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project
        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F('flags').bitor(
                        Project.flags.has_releases), )
        except IntegrityError:
            return False
        else:
            return True

    def handle_commit_ranges(self, refs):
        """
        Takes commit refs of the form:
        [
            {
                'previousCommit': None,
                'commit': 'previous_commit..commit',
            }
        ]
        Note: Overwrites 'previousCommit' and 'commit'
        """
        for ref in refs:
            if COMMIT_RANGE_DELIMITER in ref['commit']:
                ref['previousCommit'], ref['commit'] = ref['commit'].split(
                    COMMIT_RANGE_DELIMITER)

    def set_refs(self, refs, user, fetch=False):
        """Record head commits for the given repository refs.

        Raises InvalidRepository when a ref names an unknown repository.  With
        fetch=True, kicks off the async commit-fetch task against the
        previous release.
        """
        from sentry.api.exceptions import InvalidRepository
        from sentry.models import Commit, ReleaseHeadCommit, Repository
        from sentry.tasks.commits import fetch_commits

        # TODO: this does the wrong thing unless you are on the most
        # recent release. Add a timestamp compare?
        prev_release = type(self).objects.filter(
            organization_id=self.organization_id,
            projects__in=self.projects.all(),
        ).extra(select={
            'sort': 'COALESCE(date_released, date_added)',
        }).exclude(version=self.version).order_by('-sort').first()

        names = {r['repository'] for r in refs}
        repos = list(
            Repository.objects.filter(
                organization_id=self.organization_id,
                name__in=names,
            ))
        repos_by_name = {r.name: r for r in repos}
        invalid_repos = names - set(repos_by_name.keys())
        if invalid_repos:
            raise InvalidRepository('Invalid repository names: %s' %
                                    ','.join(invalid_repos))

        self.handle_commit_ranges(refs)

        for ref in refs:
            repo = repos_by_name[ref['repository']]
            commit = Commit.objects.get_or_create(
                organization_id=self.organization_id,
                repository_id=repo.id,
                key=ref['commit'],
            )[0]
            # update head commit for repo/release if exists
            ReleaseHeadCommit.objects.create_or_update(
                organization_id=self.organization_id,
                repository_id=repo.id,
                release=self,
                values={
                    'commit': commit,
                })
        if fetch:
            fetch_commits.apply_async(
                kwargs={
                    'release_id': self.id,
                    'user_id': user.id,
                    'refs': refs,
                    'prev_release_id': prev_release and prev_release.id,
                })

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """
        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get('timestamp'), reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as its a bit unwieldly
        from sentry.models import (Commit, CommitAuthor, Group, GroupLink,
                                   GroupResolution, GroupStatus, ReleaseCommit,
                                   ReleaseHeadCommit, Repository, PullRequest)
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs
        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get('message', ''))
        ]
        # Serialize concurrent set_commits calls for the same release.
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self, ).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    # Fall back to a per-organization placeholder repo name.
                    repo_name = data.get(
                        'repository') or u'organization-{}'.format(
                            self.organization_id)
                    if repo_name not in repos:
                        repos[
                            repo_name] = repo = Repository.objects.get_or_create(
                                organization_id=self.organization_id,
                                name=repo_name,
                            )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get('author_email')
                    # Synthesize a local email from the author name when the
                    # payload has a name but no email.
                    if author_email is None and data.get('author_name'):
                        author_email = (re.sub(r'[^a-zA-Z0-9\-_\.]*', '',
                                               data['author_name']).lower() +
                                        '@localhost')

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {'name': data.get('author_name')}
                        author, created = CommitAuthor.objects.create_or_update(
                            organization_id=self.organization_id,
                            email=author_email,
                            values=author_data)
                        if not created:
                            # create_or_update returns affected-row info on
                            # update, so re-fetch the actual instance.
                            author = CommitAuthor.objects.get(
                                organization_id=self.organization_id,
                                email=author_email)
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}
                    defaults = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data['author'] = author
                    if 'message' in data:
                        commit_data['message'] = data['message']
                    if 'timestamp' in data:
                        commit_data['date_added'] = data['timestamp']
                    else:
                        defaults['date_added'] = timezone.now()

                    commit, created = Commit.objects.create_or_update(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data['id'],
                        defaults=defaults,
                        values=commit_data)
                    if not created:
                        # Same re-fetch dance as for CommitAuthor above.
                        commit = Commit.objects.get(
                            organization_id=self.organization_id,
                            repository_id=repo.id,
                            key=data['id'])

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get('patch_set', [])
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file['path'],
                                    type=patched_file['type'],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    # commit_list is sorted newest-first, so the first commit
                    # seen is the latest one.
                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                # Persist materialized stats on the release row.
                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self,
                            commit__author_id__isnull=False,
                        ).values_list('commit__author_id', flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing('release.set_commits.duration', time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(
                release=self).select_related('commit').values(
                    'commit_id', 'commit__key'))

        # Groups linked to these commits ("fixes XXX-123" style links).
        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc['commit_id'] for rc in release_commits],
            ).values_list('group_id', 'linked_id'))

        commit_group_authors = [
            (
                cr[0],  # group_id
                commit_author_by_commit.get(cr[1])) for cr in commit_resolutions
        ]

        # Pull requests merged via one of these commits.
        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[
                    rc['commit__key'] for rc in release_commits
                ],
                organization_id=self.organization_id,
            ).values_list('id', flat=True))

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list('group_id', 'linked_id'))

        pr_authors = list(
            PullRequest.objects.filter(id__in=[
                prr[1] for prr in pull_request_resolutions
            ], ).select_related('author'))

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        # Cache commit-author -> Sentry user resolution; None maps to None.
        user_by_author = {None: None}

        commits_and_prs = list(
            itertools.chain(commit_group_authors, pull_request_group_authors), )

        group_project_lookup = dict(
            Group.objects.filter(id__in=[
                group_id for group_id, _ in commits_and_prs
            ], ).values_list('id', 'project_id'))

        # Resolve every linked group "in this release" and notify integrations.
        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        'release': self,
                        'type': GroupResolution.Type.in_release,
                        'status': GroupResolution.Status.resolved,
                        'actor_id': actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id, )
                group.update(status=GroupStatus.RESOLVED)
                metrics.incr('group.resolved',
                             instance='in_commit',
                             skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type='with_commit',
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={
                    'project_id': group_project_lookup[group_id],
                    'group_id': group_id,
                })
class AlertRuleTriggerAction(Model):
    """
    This model represents an action that occurs when a trigger is fired. This is
    typically some sort of notification.
    """
    __include_in_export__ = True

    # Class-level registry mapping Type -> TypeRegistration, populated at
    # import time via the register_type decorator.
    _type_registrations = {}

    # Which sort of action to take
    class Type(Enum):
        EMAIL = 0
        PAGERDUTY = 1
        SLACK = 2
        MSTEAMS = 3
        SENTRY_APP = 4

    # Action types that are delivered through an Integration record.
    INTEGRATION_TYPES = frozenset(
        (Type.PAGERDUTY.value, Type.SLACK.value, Type.MSTEAMS.value))

    class TargetType(Enum):
        # A direct reference, like an email address, Slack channel, or PagerDuty service
        SPECIFIC = 0
        # A specific user. This could be used to grab the user's email address.
        USER = 1
        # A specific team. This could be used to send an email to everyone associated
        # with a team.
        TEAM = 2
        # A Sentry App instead of any of the above.
        SENTRY_APP = 3

    TypeRegistration = namedtuple(
        "TypeRegistration",
        [
            "handler", "slug", "type", "supported_target_types",
            "integration_provider"
        ],
    )

    alert_rule_trigger = FlexibleForeignKey("sentry.AlertRuleTrigger")
    integration = FlexibleForeignKey("sentry.Integration", null=True)
    sentry_app = FlexibleForeignKey("sentry.SentryApp", null=True)
    type = models.SmallIntegerField()
    target_type = models.SmallIntegerField()
    # Identifier used to perform the action on a given target
    target_identifier = models.TextField(null=True)
    # Human readable name to display in the UI
    target_display = models.TextField(null=True)
    date_added = models.DateTimeField(default=timezone.now)
    sentry_app_config = JSONField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_alertruletriggeraction"

    @property
    def target(self):
        """Resolve target_identifier to a User, Team, or raw identifier.

        Returns None when the referenced row no longer exists or the target
        type is unhandled.
        """
        if self.target_type == self.TargetType.USER.value:
            try:
                return User.objects.get(id=int(self.target_identifier))
            except User.DoesNotExist:
                pass
        elif self.target_type == self.TargetType.TEAM.value:
            try:
                return Team.objects.get(id=int(self.target_identifier))
            except Team.DoesNotExist:
                pass
        elif self.target_type == self.TargetType.SPECIFIC.value:
            # TODO: This is only for email. We should have a way of validating that it's
            # ok to contact this email.
            return self.target_identifier

    def build_handler(self, action, incident, project):
        # Instantiate the registered handler for this action's type; counts a
        # metric (and returns None) when no handler is registered.
        type = AlertRuleTriggerAction.Type(self.type)
        if type in self._type_registrations:
            return self._type_registrations[type].handler(
                action, incident, project)
        else:
            metrics.incr(f"alert_rule_trigger.unhandled_type.{self.type}")

    def fire(self, action, incident, project, metric_value):
        # Delegate to the handler; a no-op when no handler is registered.
        handler = self.build_handler(action, incident, project)
        if handler:
            return handler.fire(metric_value)

    def resolve(self, action, incident, project, metric_value):
        # Delegate to the handler; a no-op when no handler is registered.
        handler = self.build_handler(action, incident, project)
        if handler:
            return handler.resolve(metric_value)

    @classmethod
    def register_type(cls, slug, type, supported_target_types,
                      integration_provider=None):
        """
        Registers a handler for a given type.
        :param slug: A string representing the name of this type registration
        :param type: The `Type` to handle.
        :param handler: A subclass of `ActionHandler` that accepts the
        `AlertRuleTriggerAction` and `Incident`.
        :param integration_provider: String representing the integration provider
        related to this type.
        """

        def inner(handler):
            # First registration wins; a duplicate is a programming error.
            if type not in cls._type_registrations:
                cls._type_registrations[type] = cls.TypeRegistration(
                    handler, slug, type, frozenset(supported_target_types),
                    integration_provider)
            else:
                raise Exception("Handler already registered for type %s" % type)
            return handler

        return inner

    @classmethod
    def get_registered_type(cls, type):
        # Raises KeyError for unregistered types.
        return cls._type_registrations[type]

    @classmethod
    def get_registered_types(cls):
        return list(cls._type_registrations.values())
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    A commit is generally a git commit. See also releasecommit.py
    """

    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    # Many-to-many through ReleaseProject: one release may span several projects.
    projects = models.ManyToManyField("sentry.Project",
                                      related_name="releases",
                                      through=ReleaseProject)
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    # NOTE(review): mutable default ({}) — safe only if JSONField copies the
    # default per instance; confirm against the JSONField implementation.
    data = JSONField(default={})
    # new issues (groups) that arise as a consequence of this release
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey("sentry.User",
                               null=True,
                               blank=True,
                               on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True, default=0)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True, default=0)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    # HACK HACK HACK
    # As a transitionary step we permit release rows to exist multiple times
    # where they are "specialized" for a specific project. The goal is to
    # later split up releases by project again. This is for instance used
    # by the org release listing.
    _for_project_id = None

    class Meta:
        app_label = "sentry"
        db_table = "sentry_release"
        unique_together = (("organization", "version"), )

    __repr__ = sane_repr("organization_id", "version")

    def __eq__(self, other):
        """Make sure that specialized releases are only comparable to the
        same other specialized release. This for instance lets us treat them
        separately for serialization purposes.
        """
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 unless a base class restores it — confirm
        # releases are never used as set members / dict keys, or add __hash__.
        return Model.__eq__(
            self, other) and self._for_project_id == other._for_project_id

    @staticmethod
    def is_valid_version(value):
        # A version is valid unless it is empty, contains a forbidden
        # character, is "." or "..", or is the reserved word "latest".
        return not (not value or any(c in value for c in BAD_RELEASE_CHARS)
                    or value in (".", "..") or value.lower() == "latest")

    @classmethod
    def get_cache_key(cls, organization_id, version):
        """Cache key for a (org, version) lookup; version is md5-hashed so
        arbitrary version strings are safe in the key."""
        return "release:3:%s:%s" % (organization_id,
                                    md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        """Distributed-lock key guarding commit binding for one release."""
        return u"releasecommits:{}:{}".format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """Fetch a release by project + version through the cache.

        Negative lookups are cached as -1 for 5 minutes; both the hit and
        the miss return None to the caller when no release exists.
        """
        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id,
                    projects=project,
                    version=version)
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)

        if release == -1:
            return

        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        # The timer context doubles as a tag dict that the impl annotates
        # with cache_hit/created outcomes.
        with metrics.timer("models.release.get_or_create") as metric_tags:
            return cls._get_or_create_impl(project, version, date_added,
                                           metric_tags)

    @classmethod
    def _get_or_create_impl(cls, project, version, date_added, metric_tags):
        """Cache-aware get-or-create, tolerating both the legacy
        "<project-slug>-<version>" naming and concurrent creation races."""
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ("%s-%s" %
                               (project.slug, version))[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(
                    organization_id=project.organization_id,
                    version__in=[version, project_version],
                    projects=project,
                ))

            if releases:
                # Prefer the project-prefixed row when both forms exist.
                try:
                    release = [
                        r for r in releases if r.version == project_version
                    ][0]
                except IndexError:
                    release = releases[0]
                metric_tags["created"] = "false"
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )

                    metric_tags["created"] = "true"
                except IntegrityError:
                    # Lost a creation race: another worker inserted the row.
                    metric_tags["created"] = "false"
                    release = cls.objects.get(
                        organization_id=project.organization_id,
                        version=version)

                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F("flags").bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)
            metric_tags["cache_hit"] = "false"
        else:
            metric_tags["cache_hit"] = "true"

        return release

    @cached_property
    def version_info(self):
        """Parsed form of `self.version`, or None when unparseable."""
        try:
            return parse_release(self.version)
        except RelayError:
            # This can happen on invalid legacy releases
            return None

    @classmethod
    def merge(cls, to_release, from_releases):
        """Fold each release in `from_releases` into `to_release`, repointing
        every referencing row, then delete the merged-away release."""
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            Group,
            GroupRelease,
            GroupResolution,
        )

        model_list = (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            GroupRelease,
            GroupResolution,
        )
        for release in from_releases:
            for model in model_list:
                if hasattr(model, "release"):
                    update_kwargs = {"release": to_release}
                else:
                    update_kwargs = {"release_id": to_release.id}
                try:
                    # Bulk repoint; fails when the target already has a
                    # conflicting row (unique constraint).
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(
                            **update_kwargs)
                except IntegrityError:
                    # Fall back to row-by-row; rows that still conflict are
                    # duplicates of rows already on `to_release` — drop them.
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(
                                    **update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(
                first_release=to_release)

            release.delete()

    def add_dist(self, name, date_added=None):
        """Get or create the Distribution `name` for this release."""
        from sentry.models import Distribution

        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={
                "date_added": date_added,
                "organization_id": self.organization_id
            },
        )[0]

    def get_dist(self, name):
        """Return the Distribution `name` for this release, or None."""
        from sentry.models import Distribution

        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project

        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F("flags").bitor(Project.flags.has_releases))
        except IntegrityError:
            # Row already existed — the project was previously added.
            return False
        else:
            return True

    def handle_commit_ranges(self, refs):
        """
        Takes commit refs of the form:
        [
            {
                'previousCommit': None,
                'commit': 'previous_commit..commit',
            }
        ]
        Note: Overwrites 'previousCommit' and 'commit'
        """
        for ref in refs:
            if COMMIT_RANGE_DELIMITER in ref["commit"]:
                ref["previousCommit"], ref["commit"] = ref["commit"].split(
                    COMMIT_RANGE_DELIMITER)

    def set_refs(self, refs, user, fetch=False):
        """Record head commits for the given repository refs and optionally
        kick off async commit fetching against the previous release.

        Raises InvalidRepository when a ref names an unknown repository.
        """
        with sentry_sdk.start_span(op="set_refs"):
            from sentry.api.exceptions import InvalidRepository
            from sentry.models import Commit, ReleaseHeadCommit, Repository
            from sentry.tasks.commits import fetch_commits

            # TODO: this does the wrong thing unless you are on the most
            # recent release. Add a timestamp compare?
            prev_release = (type(self).objects.filter(
                organization_id=self.organization_id,
                projects__in=self.projects.all()).extra(
                    select={
                        "sort": "COALESCE(date_released, date_added)"
                    }).exclude(version=self.version).order_by("-sort").first())

            names = {r["repository"] for r in refs}
            repos = list(
                Repository.objects.filter(
                    organization_id=self.organization_id, name__in=names))
            repos_by_name = {r.name: r for r in repos}
            invalid_repos = names - set(repos_by_name.keys())
            if invalid_repos:
                raise InvalidRepository("Invalid repository names: %s" %
                                        ",".join(invalid_repos))

            self.handle_commit_ranges(refs)

            for ref in refs:
                repo = repos_by_name[ref["repository"]]

                commit = Commit.objects.get_or_create(
                    organization_id=self.organization_id,
                    repository_id=repo.id,
                    key=ref["commit"])[0]
                # update head commit for repo/release if exists
                ReleaseHeadCommit.objects.create_or_update(
                    organization_id=self.organization_id,
                    repository_id=repo.id,
                    release=self,
                    values={"commit": commit},
                )
            if fetch:
                fetch_commits.apply_async(
                    kwargs={
                        "release_id": self.id,
                        "user_id": user.id,
                        "refs": refs,
                        "prev_release_id": prev_release and prev_release.id,
                    })

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """
        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get("timestamp", 0),
                         reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (
            Commit,
            CommitAuthor,
            Group,
            GroupLink,
            GroupResolution,
            GroupStatus,
            ReleaseCommit,
            ReleaseHeadCommit,
            Repository,
            PullRequest,
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs

        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get("message", ""))
        ]
        # Serialize concurrent commit binding for this release behind a
        # distributed lock.
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}        # author_email -> CommitAuthor (memo)
                repos = {}          # repo_name -> Repository (memo)
                commit_author_by_commit = {}
                head_commit_by_repo = {}  # first (newest) commit per repo
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get(
                        "repository") or u"organization-{}".format(
                            self.organization_id)
                    if repo_name not in repos:
                        repos[
                            repo_name] = repo = Repository.objects.get_or_create(
                                organization_id=self.organization_id,
                                name=repo_name)[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get("author_email")
                    # Synthesize a placeholder email from the author name so
                    # authorship is still tracked.
                    if author_email is None and data.get("author_name"):
                        author_email = (re.sub(r"[^a-zA-Z0-9\-_\.]*", "",
                                               data["author_name"]).lower() +
                                        "@localhost")

                    author_email = truncatechars(author_email, 75)

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {"name": data.get("author_name")}
                        author, created = CommitAuthor.objects.get_or_create(
                            organization_id=self.organization_id,
                            email=author_email,
                            defaults=author_data,
                        )
                        if author.name != author_data["name"]:
                            author.update(name=author_data["name"])
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data["author"] = author
                    if "message" in data:
                        commit_data["message"] = data["message"]
                    if "timestamp" in data:
                        commit_data["date_added"] = data["timestamp"]

                    commit, created = Commit.objects.get_or_create(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data["id"],
                        defaults=commit_data,
                    )
                    if not created:
                        # Only touch fields that actually changed.
                        commit_data = {
                            key: value
                            for key, value in six.iteritems(commit_data)
                            if getattr(commit, key) != value
                        }
                        if commit_data:
                            commit.update(**commit_data)

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    # Guard against patch_set being None
                    patch_set = data.get("patch_set") or []
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file["path"],
                                    type=patched_file["type"],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    # commit_list is newest-first, so the first commit seen
                    # is the latest overall and the head for its repo.
                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self, commit__author_id__isnull=False).
                        values_list("commit__author_id", flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing("release.set_commits.duration", time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(
                release=self).select_related("commit").values(
                    "commit_id", "commit__key"))

        # Groups marked resolved-by-commit for commits in this release.
        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc["commit_id"] for rc in release_commits],
            ).values_list("group_id", "linked_id"))

        commit_group_authors = [
            (cr[0], commit_author_by_commit.get(cr[1]))
            for cr in commit_resolutions  # group_id
        ]

        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[
                    rc["commit__key"] for rc in release_commits
                ],
                organization_id=self.organization_id,
            ).values_list("id", flat=True))

        # Groups marked resolved-by-pull-request whose PR merged into this
        # release.
        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list("group_id", "linked_id"))

        pr_authors = list(
            PullRequest.objects.filter(
                id__in=[prr[1] for prr in pull_request_resolutions
                        ]).select_related("author"))

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        # Memoized author -> User lookup; None author maps to None actor.
        user_by_author = {None: None}

        commits_and_prs = list(
            itertools.chain(commit_group_authors, pull_request_group_authors))

        group_project_lookup = dict(
            Group.objects.filter(
                id__in=[group_id
                        for group_id, _ in commits_and_prs]).values_list(
                            "id", "project_id"))

        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        "release": self,
                        "type": GroupResolution.Type.in_release,
                        "status": GroupResolution.Status.resolved,
                        "actor_id": actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                remove_group_from_inbox(group)
                metrics.incr("group.resolved",
                             instance="in_commit",
                             skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type="with_commit",
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={
                    "project_id": group_project_lookup[group_id],
                    "group_id": group_id
                })

    def safe_delete(self):
        """Deletes a release if possible or raises a `UnsafeReleaseDeletion`
        exception.
        """
        from sentry.models import Group, ReleaseFile
        from sentry.snuba.sessions import check_has_health_data

        # we don't want to remove the first_release metadata on the Group, and
        # while people might want to kill a release (maybe to remove files),
        # removing the release is prevented
        if Group.objects.filter(first_release=self).exists():
            raise UnsafeReleaseDeletion(ERR_RELEASE_REFERENCED)

        # We do not allow releases with health data to be deleted because
        # the upserting from snuba data would create the release again.
        # We would need to be able to delete this data from snuba which we
        # can't do yet.
        project_ids = list(self.projects.values_list("id").all())
        if check_has_health_data([(p[0], self.version) for p in project_ids]):
            raise UnsafeReleaseDeletion(ERR_RELEASE_HEALTH_DATA)

        # TODO(dcramer): this needs to happen in the queue as it could be a long
        # and expensive operation
        file_list = ReleaseFile.objects.filter(
            release=self).select_related("file")
        for releasefile in file_list:
            releasefile.file.delete()
            releasefile.delete()

        self.delete()