class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.

    Uniquely identified per organization by ``version`` (see ``Meta.unique_together``)
    and attached to one or more projects through ``ReleaseProject``.
    """
    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    projects = models.ManyToManyField(
        "sentry.Project", related_name="releases", through=ReleaseProject
    )
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    # NOTE(review): mutable default `{}` is evaluated once at class definition;
    # verify the project's JSONField copies it per-row, otherwise rows could
    # share one dict (the usual Django fix is `default=dict`).
    data = JSONField(default={})
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process
    owner = FlexibleForeignKey("sentry.User", null=True, blank=True, on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True, default=0)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True, default=0)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_release"
        unique_together = (("organization", "version"),)

    __repr__ = sane_repr("organization_id", "version")

    @staticmethod
    def is_valid_version(value):
        # A version is rejected if it contains any forbidden character, is a
        # path-like placeholder (".", ".."), is empty/falsy, or is the
        # reserved keyword "latest".
        return not (
            any(c in value for c in BAD_RELEASE_CHARS)
            or value in (".", "..")
            or not value
            or value.lower() == "latest"
        )

    @classmethod
    def get_cache_key(cls, organization_id, version):
        # Version is hashed so arbitrary-length/charset versions produce a
        # valid, bounded cache key. "3" is the cache-schema generation.
        return "release:3:%s:%s" % (organization_id, md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        # Lock key guarding commit-list mutation for one release (see set_commits).
        return u"releasecommits:{}:{}".format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """
        Look up a release for ``project``/``version`` with a 5-minute cache.

        A cache value of -1 is the negative-lookup sentinel; it is translated
        back into ``None`` for callers.
        """
        cache_key = cls.get_cache_key(project.organization_id, version)
        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id,
                    projects=project,
                    version=version,
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)
        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """
        Fetch or create the release for ``project``/``version``.

        Also considers the legacy "<project-slug>-<version>" naming so older
        releases keep resolving; ensures the project is linked to the release
        and that the project's ``has_releases`` flag is set. Results are
        cached for one hour.
        """
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ("%s-%s" % (project.slug, version))[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(
                    organization_id=project.organization_id,
                    version__in=[version, project_version],
                    projects=project,
                )
            )
            if releases:
                # Prefer the legacy project-prefixed version when both exist.
                try:
                    release = [r for r in releases if r.version == project_version][0]
                except IndexError:
                    release = releases[0]
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )
                except IntegrityError:
                    # Lost a creation race; the row now exists, fetch it.
                    release = cls.objects.get(
                        organization_id=project.organization_id, version=version
                    )
                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F("flags").bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)
        return release

    @classmethod
    def merge(cls, to_release, from_releases):
        """
        Fold every release in ``from_releases`` into ``to_release``.

        Repoints each referencing model row, falling back to row-by-row
        updates (deleting conflicting duplicates) when a bulk update hits a
        uniqueness constraint, then deletes the merged-away release.
        """
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            Group,
            GroupRelease,
            GroupResolution,
        )

        model_list = (
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseProject,
            ReleaseProjectEnvironment,
            GroupRelease,
            GroupResolution,
        )
        for release in from_releases:
            for model in model_list:
                # Some models use a FK field named "release", others a raw
                # "release_id" integer column; pick the right kwarg.
                if hasattr(model, "release"):
                    update_kwargs = {"release": to_release}
                else:
                    update_kwargs = {"release_id": to_release.id}
                try:
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(**update_kwargs)
                except IntegrityError:
                    # Bulk repoint collided with an existing row on the target
                    # release; retry one row at a time and drop duplicates.
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(**update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(first_release=to_release)

            release.delete()

    def add_dist(self, name, date_added=None):
        """Fetch or create the Distribution named ``name`` for this release."""
        from sentry.models import Distribution

        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={"date_added": date_added, "organization_id": self.organization_id},
        )[0]

    def get_dist(self, name):
        """Return the Distribution named ``name``, or None if absent."""
        from sentry.models import Distribution

        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project

        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
            if not project.flags.has_releases:
                project.flags.has_releases = True
                project.update(flags=F("flags").bitor(Project.flags.has_releases))
        except IntegrityError:
            # Link already existed (unique constraint on project/release).
            return False
        else:
            return True

    def handle_commit_ranges(self, refs):
        """
        Takes commit refs of the form:
            [
                {
                    'previousCommit': None,
                    'commit': 'previous_commit..commit',
                }
            ]
        Note: Overwrites 'previousCommit' and 'commit'
        """
        for ref in refs:
            if COMMIT_RANGE_DELIMITER in ref["commit"]:
                ref["previousCommit"], ref["commit"] = ref["commit"].split(COMMIT_RANGE_DELIMITER)

    def set_refs(self, refs, user, fetch=False):
        """
        Record head commits for this release from ``refs``.

        Each ref names a repository and a commit (or commit range — see
        handle_commit_ranges). Raises InvalidRepository when any named
        repository does not exist in the organization. When ``fetch`` is
        true, kicks off the async fetch_commits task against the previous
        release.
        """
        from sentry.api.exceptions import InvalidRepository
        from sentry.models import Commit, ReleaseHeadCommit, Repository
        from sentry.tasks.commits import fetch_commits

        # TODO: this does the wrong thing unless you are on the most
        # recent release. Add a timestamp compare?
        prev_release = (
            type(self)
            .objects.filter(
                organization_id=self.organization_id, projects__in=self.projects.all()
            )
            .extra(select={"sort": "COALESCE(date_released, date_added)"})
            .exclude(version=self.version)
            .order_by("-sort")
            .first()
        )

        names = {r["repository"] for r in refs}
        repos = list(
            Repository.objects.filter(organization_id=self.organization_id, name__in=names)
        )
        repos_by_name = {r.name: r for r in repos}
        invalid_repos = names - set(repos_by_name.keys())
        if invalid_repos:
            raise InvalidRepository("Invalid repository names: %s" % ",".join(invalid_repos))

        self.handle_commit_ranges(refs)

        for ref in refs:
            repo = repos_by_name[ref["repository"]]

            commit = Commit.objects.get_or_create(
                organization_id=self.organization_id, repository_id=repo.id, key=ref["commit"]
            )[0]
            # update head commit for repo/release if exists
            ReleaseHeadCommit.objects.create_or_update(
                organization_id=self.organization_id,
                repository_id=repo.id,
                release=self,
                values={"commit": commit},
            )
        if fetch:
            fetch_commits.apply_async(
                kwargs={
                    "release_id": self.id,
                    "user_id": user.id,
                    "refs": refs,
                    "prev_release_id": prev_release and prev_release.id,
                }
            )

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.

        Side effects beyond the commit log itself: materializes
        commit_count/authors/last_commit_id on the release, records head
        commits per repository, emits the release_commits_updated signal when
        the bound set changed, and resolves groups linked to these commits or
        to pull requests merged by them.
        """
        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get("timestamp"), reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (
            Commit,
            CommitAuthor,
            Group,
            GroupLink,
            GroupResolution,
            GroupStatus,
            ReleaseCommit,
            ReleaseHeadCommit,
            Repository,
            PullRequest,
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs

        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c
            for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get("message", ""))
        ]

        # Serialize commit-list rewrites per release via a distributed lock.
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                initial_commit_ids = set(
                    ReleaseCommit.objects.filter(release=self).values_list(
                        "commit_id", flat=True
                    )
                )
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}          # author_email -> CommitAuthor (memoized per call)
                repos = {}            # repo name -> Repository (memoized per call)
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    # Commits without a repository fall into a synthetic
                    # per-organization bucket.
                    repo_name = data.get("repository") or u"organization-{}".format(
                        self.organization_id
                    )
                    if repo_name not in repos:
                        repos[repo_name] = repo = Repository.objects.get_or_create(
                            organization_id=self.organization_id, name=repo_name
                        )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get("author_email")
                    # Fabricate a stable placeholder email from the author
                    # name when only a name was supplied.
                    if author_email is None and data.get("author_name"):
                        author_email = (
                            re.sub(r"[^a-zA-Z0-9\-_\.]*", "", data["author_name"]).lower()
                            + "@localhost"
                        )

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {"name": data.get("author_name")}
                        author, created = CommitAuthor.objects.create_or_update(
                            organization_id=self.organization_id,
                            email=author_email,
                            values=author_data,
                        )
                        if not created:
                            # create_or_update returns affected-rows on update,
                            # not the instance; re-fetch it.
                            author = CommitAuthor.objects.get(
                                organization_id=self.organization_id, email=author_email
                            )
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}
                    defaults = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data["author"] = author
                    if "message" in data:
                        commit_data["message"] = data["message"]
                    if "timestamp" in data:
                        commit_data["date_added"] = data["timestamp"]
                    else:
                        defaults["date_added"] = timezone.now()

                    commit, created = Commit.objects.create_or_update(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data["id"],
                        defaults=defaults,
                        values=commit_data,
                    )
                    if not created:
                        # Same create_or_update caveat as above: re-fetch the row.
                        commit = Commit.objects.get(
                            organization_id=self.organization_id,
                            repository_id=repo.id,
                            key=data["id"],
                        )

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get("patch_set", [])
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                # NOTE(review): CommitFileChange is not in the
                                # local import list above — verify it is
                                # imported at module level.
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file["path"],
                                    type=patched_file["type"],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    # commit_list is sorted newest-first, so the first commit
                    # seen is the latest one.
                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                # Materialize commit stats onto the release row.
                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self, commit__author_id__isnull=False
                        )
                        .values_list("commit__author_id", flat=True)
                        .distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing("release.set_commits.duration", time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(release=self)
            .select_related("commit")
            .values("commit_id", "commit__key")
        )

        # Notify listeners when the bound commit set actually changed.
        final_commit_ids = set(rc["commit_id"] for rc in release_commits)
        removed_commit_ids = initial_commit_ids - final_commit_ids
        added_commit_ids = final_commit_ids - initial_commit_ids
        if removed_commit_ids or added_commit_ids:
            release_commits_updated.send_robust(
                release=self,
                removed_commit_ids=removed_commit_ids,
                added_commit_ids=added_commit_ids,
                sender=self.__class__,
            )

        # Groups directly linked to one of these commits ("fixes ABC-123"-style).
        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc["commit_id"] for rc in release_commits],
            ).values_list("group_id", "linked_id")
        )

        commit_group_authors = [
            (cr[0], commit_author_by_commit.get(cr[1]))  # group_id
            for cr in commit_resolutions
        ]

        # Pull requests merged by one of these commits.
        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[rc["commit__key"] for rc in release_commits],
                organization_id=self.organization_id,
            ).values_list("id", flat=True)
        )

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list("group_id", "linked_id")
        )

        pr_authors = list(
            PullRequest.objects.filter(
                id__in=[prr[1] for prr in pull_request_resolutions]
            ).select_related("author")
        )

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [
            (prr[0], pr_authors_dict.get(prr[1])) for prr in pull_request_resolutions
        ]

        user_by_author = {None: None}  # memoize author -> acting user lookups

        commits_and_prs = list(itertools.chain(commit_group_authors, pull_request_group_authors))

        group_project_lookup = dict(
            Group.objects.filter(
                id__in=[group_id for group_id, _ in commits_and_prs]
            ).values_list("id", "project_id")
        )

        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        "release": self,
                        "type": GroupResolution.Type.in_release,
                        "status": GroupResolution.Status.resolved,
                        "actor_id": actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                metrics.incr("group.resolved", instance="in_commit", skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type="with_commit",
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={"project_id": group_project_lookup[group_id], "group_id": group_id}
            )
class Project(Model):
    """
    Projects are permission based namespaces which generally
    are the top level entry point for all data.
    """
    PLATFORM_CHOICES = tuple(
        (p, PLATFORM_TITLES.get(p, p.title())) for p in PLATFORM_LIST
    ) + (('other', 'Other'), )

    slug = models.SlugField(null=True)
    name = models.CharField(max_length=200)
    organization = FlexibleForeignKey('sentry.Organization')
    team = FlexibleForeignKey('sentry.Team')
    public = models.BooleanField(default=False)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ProjectStatus.VISIBLE, _('Active')),
            (ProjectStatus.PENDING_DELETION, _('Pending Deletion')),
            (ProjectStatus.DELETION_IN_PROGRESS, _('Deletion in Progress')),
        ),
        db_index=True
    )
    platform = models.CharField(max_length=32, choices=PLATFORM_CHOICES, null=True)

    objects = ProjectManager(cache_fields=[
        'pk',
        'slug',
    ])

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_project'
        unique_together = (('team', 'slug'), ('organization', 'slug'))

    __repr__ = sane_repr('team_id', 'slug')

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def save(self, *args, **kwargs):
        # Auto-derive a slug from the name, unique within the organization.
        if not self.slug:
            slugify_instance(self, self.name, organization=self.organization)
        super(Project, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-stream', args=[self.organization.slug, self.slug]))

    def merge_to(self, project):
        """
        Fold this project's data into ``project``, then delete this project.

        Groups are matched by checksum: unmatched groups (and their events and
        tag values) are repointed wholesale; matched groups have their events
        moved onto the existing group and their tag counts merged.
        """
        from sentry.models import (Group, GroupTagValue, Event, TagValue)

        if not isinstance(project, Project):
            project = Project.objects.get_from_cache(pk=project)

        # NOTE(review): this iterates the queryset while `group.update(...)`
        # mutates rows matched by it — presumably acceptable here, but verify
        # the backend evaluates the queryset up-front.
        for group in Group.objects.filter(project=self):
            try:
                other = Group.objects.get(
                    project=project,
                    checksum=group.checksum,
                )
            except Group.DoesNotExist:
                # No counterpart in the target project: move the group and its
                # dependents over as-is.
                group.update(project=project)
                for model in (Event, GroupTagValue):
                    model.objects.filter(project=self, group=group).update(project=project)
            else:
                # Counterpart exists: merge into it.
                Event.objects.filter(group=group).update(group=other)

                for obj in GroupTagValue.objects.filter(group=group):
                    # NOTE(review): get_or_create targets `group=group` (the
                    # source group) rather than `other` — looks like the merged
                    # tag values stay on the old group; confirm this is
                    # intended.
                    obj2, created = GroupTagValue.objects.get_or_create(
                        project=project,
                        group=group,
                        key=obj.key,
                        value=obj.value,
                        defaults={'times_seen': obj.times_seen})
                    if not created:
                        obj2.update(times_seen=F('times_seen') + obj.times_seen)

        for fv in TagValue.objects.filter(project=self):
            TagValue.objects.get_or_create(project=project, key=fv.key, value=fv.value)
            fv.delete()
        self.delete()

    def is_internal_project(self):
        # True when this project is one of the instance's own configured
        # frontend/backend error-reporting projects.
        for value in (settings.SENTRY_FRONTEND_PROJECT, settings.SENTRY_PROJECT):
            if str(self.id) == str(value) or str(self.slug) == str(value):
                return True
        return False

    def get_tags(self, with_internal=True):
        """
        Return the tag keys for this project, memoized on the instance.

        Uses the explicit 'tags' project option when set; otherwise falls back
        to all observed keys, optionally filtering out internal 'sentry:' keys.
        """
        from sentry.models import TagKey

        if not hasattr(self, '_tag_cache'):
            tags = self.get_option('tags', None)
            if tags is None:
                tags = [
                    t for t in TagKey.objects.all_keys(self)
                    if with_internal or not t.startswith('sentry:')
                ]
            self._tag_cache = tags
        return self._tag_cache

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import ProjectOption

        return ProjectOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import ProjectOption

        return ProjectOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import ProjectOption

        return ProjectOption.objects.unset_value(self, *args, **kwargs)

    @property
    def member_set(self):
        # Active members of the owning organization who can see this project:
        # either on its team or holding global access.
        from sentry.models import OrganizationMember

        return OrganizationMember.objects.filter(
            Q(teams=self.team) | Q(has_global_access=True),
            user__is_active=True,
            organization=self.organization,
        ).distinct()

    def has_access(self, user, access=None):
        """
        Deprecated membership check.

        Returns True when ``user`` is an active member at (at most) the given
        access level; when the org uses SSO, the member's auth identity must
        also still be valid.
        """
        from sentry.models import AuthIdentity, OrganizationMember

        warnings.warn('Project.has_access is deprecated.', DeprecationWarning)

        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        try:
            member = queryset.get()
        except OrganizationMember.DoesNotExist:
            return False

        try:
            auth_identity = AuthIdentity.objects.get(
                auth_provider__organization=self.organization_id,
                user=member.user_id,
            )
        except AuthIdentity.DoesNotExist:
            # No SSO provider/identity in play; membership alone suffices.
            return True

        return auth_identity.is_valid(member)

    def get_audit_log_data(self):
        # Snapshot of the fields recorded in audit-log entries.
        return {
            'slug': self.slug,
            'name': self.name,
            'status': self.status,
            'public': self.public,
            'platform': self.platform,
        }
class GroupRelease(Model): __core__ = False # TODO: Should be BoundedBigIntegerField project_id = BoundedPositiveIntegerField(db_index=True) group_id = BoundedBigIntegerField() # TODO: Should be BoundedBigIntegerField release_id = BoundedPositiveIntegerField(db_index=True) environment = models.CharField(max_length=64, default="") first_seen = models.DateTimeField(default=timezone.now) last_seen = models.DateTimeField(default=timezone.now, db_index=True) class Meta: app_label = "sentry" db_table = "sentry_grouprelease" unique_together = (("group_id", "release_id", "environment"),) __repr__ = sane_repr("group_id", "release_id") @classmethod def get_cache_key(cls, group_id, release_id, environment): return "grouprelease:1:{}:{}".format( group_id, md5_text(f"{release_id}:{environment}").hexdigest() ) @classmethod def get_or_create(cls, group, release, environment, datetime, **kwargs): cache_key = cls.get_cache_key(group.id, release.id, environment.name) instance = cache.get(cache_key) if instance is None: try: with transaction.atomic(): instance, created = ( cls.objects.create( release_id=release.id, group_id=group.id, environment=environment.name, project_id=group.project_id, first_seen=datetime, last_seen=datetime, ), True, ) except IntegrityError: instance, created = ( cls.objects.get( release_id=release.id, group_id=group.id, environment=environment.name ), False, ) cache.set(cache_key, instance, 3600) else: created = False # TODO(dcramer): this would be good to buffer, but until then we minimize # updates to once a minute, and allow Postgres to optimistically skip # it even if we can't if not created and instance.last_seen < datetime - timedelta(seconds=60): cls.objects.filter( id=instance.id, last_seen__lt=datetime - timedelta(seconds=60) ).update(last_seen=datetime) instance.last_seen = datetime cache.set(cache_key, instance, 3600) return instance
class SentryApp(ParanoidModel, HasApiScopes):
    """
    A Sentry integration application ("Sentry App"): an OAuth-backed
    integration owned by one organization and installable into others.
    """
    __core__ = True

    application = models.OneToOneField(
        'sentry.ApiApplication',
        null=True,
        on_delete=models.SET_NULL,
        related_name='sentry_app',
    )

    # Much of the OAuth system in place currently depends on a User existing.
    # This "proxy user" represents the SentryApp in those cases.
    proxy_user = models.OneToOneField(
        'sentry.User',
        null=True,
        on_delete=models.SET_NULL,
        related_name='sentry_app'
    )

    # The Organization the Sentry App was created in "owns" it. Members of that
    # Org have differing access, dependent on their role within the Org.
    owner = FlexibleForeignKey('sentry.Organization', related_name='owned_sentry_apps')

    name = models.TextField()
    slug = models.CharField(max_length=SENTRY_APP_SLUG_MAX_LENGTH, unique=True)
    status = BoundedPositiveIntegerField(
        default=SentryAppStatus.UNPUBLISHED,
        choices=SentryAppStatus.as_choices(),
        db_index=True,
    )
    uuid = models.CharField(max_length=64, default=default_uuid)

    redirect_url = models.URLField(null=True)
    webhook_url = models.URLField()
    # does the application subscribe to `event.alert`,
    # meaning can it be used in alert rules as a {service} ?
    is_alertable = models.BooleanField(default=False)

    events = ArrayField(of=models.TextField, null=True)

    overview = models.TextField(null=True)

    date_added = models.DateTimeField(default=timezone.now)
    date_updated = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_sentryapp'

    @classmethod
    def visible_for_user(cls, user):
        """Apps a user may see: all for superusers, otherwise published apps
        plus apps owned by one of the user's organizations."""
        if user.is_superuser:
            return cls.objects.all()

        return cls.objects.filter(
            Q(status=SentryAppStatus.PUBLISHED) | Q(owner__in=user.get_orgs()),
        )

    @property
    def organizations(self):
        # Organizations this app is installed in (empty queryset if unsaved).
        if not self.pk:
            return Organization.objects.none()

        return Organization \
            .objects \
            .select_related('sentry_app_installations') \
            .filter(sentry_app_installations__sentry_app_id=self.id)

    @property
    def teams(self):
        # Teams across every organization the app is installed in.
        from sentry.models import Team

        if not self.pk:
            return Team.objects.none()

        return Team.objects.filter(organization__in=self.organizations)

    @property
    def is_published(self):
        return self.status == SentryAppStatus.PUBLISHED

    def save(self, *args, **kwargs):
        # Ensure a slug exists before persisting.
        self._set_slug()
        return super(SentryApp, self).save(*args, **kwargs)

    def is_installed_on(self, organization):
        return self.organizations.filter(pk=organization.pk).exists()

    def _set_slug(self):
        """
        Derive ``slug`` from ``name`` (lowercase, dash form) when not already
        set — e.g. a name of "My Cool App" yields the slug "my-cool-app".
        """
        if not self.slug:
            self.slug = slugify(self.name)

    def build_signature(self, body):
        """Return the hex HMAC-SHA256 of ``body`` keyed with the app's OAuth
        client secret (used to sign outgoing webhook payloads)."""
        secret = self.application.client_secret
        return hmac.new(
            key=secret.encode('utf-8'),
            msg=body.encode('utf-8'),
            digestmod=sha256,
        ).hexdigest()
class File(Model):
    """
    A stored file backed by a single deduplicated FileBlob, with legacy
    storage fields retained for rows not yet migrated to blobs.
    """
    __core__ = False

    name = models.CharField(max_length=128)
    type = models.CharField(max_length=64)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    headers = JSONField()
    blob = FlexibleForeignKey('sentry.FileBlob', null=True)

    # <Legacy fields>
    storage = models.CharField(max_length=128, null=True)
    storage_options = JSONField()
    path = models.TextField(null=True)
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, null=True)
    # </Legacy fields>

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_file'

    def delete(self, *args, **kwargs):
        # Garbage-collect the blob when this was the last File referencing it.
        super(File, self).delete(*args, **kwargs)
        if self.blob and not File.objects.filter(blob=self.blob).exists():
            self.blob.delete()

    def ensure_blob(self):
        """
        Lazily migrate a legacy (storage/path) row to a FileBlob.

        No-op when a blob is already attached; otherwise creates-or-reuses a
        blob by checksum under a per-checksum lock so concurrent conversions
        of the same content collapse into one blob.
        """
        if self.blob:
            return

        lock_key = 'fileblob:convert:{}'.format(self.checksum)
        with Lock(lock_key, timeout=60):
            blob, created = FileBlob.objects.get_or_create(
                checksum=self.checksum,
                defaults={
                    'storage': self.storage,
                    'storage_options': self.storage_options,
                    'path': self.path,
                    'size': self.size,
                    'timestamp': self.timestamp,
                },
            )

            # if this blob already existed, lets kill the duplicate
            # TODO(dcramer): kill data when fully migrated
            # if self.path != blob.path:
            #     get_storage_class(self.storage)(
            #         **self.storage_options
            #     ).delete(self.path)

            self.update(
                blob=blob,
                # TODO(dcramer): kill data when fully migrated
                # checksum=None,
                # path=None,
                # storage=None,
                # storage_options={},
            )

    def getfile(self, *args, **kwargs):
        # Converts legacy rows on first access, then delegates to the blob.
        self.ensure_blob()
        return self.blob.getfile(*args, **kwargs)
class SentryAppInstallation(ParanoidModel):
    """
    A SentryApp installed into a specific Organization, carrying the OAuth
    grant/token state for that installation.
    """
    __include_in_export__ = True

    sentry_app = FlexibleForeignKey("sentry.SentryApp", related_name="installations")

    # SentryApp's are installed and scoped to an Organization. They will have
    # access, defined by their scopes, to Teams, Projects, etc. under that
    # Organization, implicitly.
    organization = FlexibleForeignKey(
        "sentry.Organization", related_name="sentry_app_installations"
    )

    # Each installation has a Grant that the integration can exchange for an
    # Access Token.
    api_grant = models.OneToOneField(
        "sentry.ApiGrant",
        null=True,
        on_delete=models.SET_NULL,
        related_name="sentry_app_installation",
    )

    # Only use this token for public integrations since each install has only token at a time
    # An installation gets an access token once the Grant has been exchanged,
    # and is updated when the token gets refreshed.
    #
    # Do NOT Use this token for internal integrations since there could be multiple
    # need to look at SentryAppInstallationToken which connects api_tokens to installations
    api_token = models.OneToOneField(
        "sentry.ApiToken",
        null=True,
        on_delete=models.SET_NULL,
        related_name="sentry_app_installation",
    )

    uuid = models.CharField(max_length=64, default=default_uuid)

    status = BoundedPositiveIntegerField(
        default=SentryAppInstallationStatus.PENDING,
        choices=SentryAppInstallationStatus.as_choices(),
        db_index=True,
    )

    date_added = models.DateTimeField(default=timezone.now)
    date_updated = models.DateTimeField(default=timezone.now)

    objects = SentryAppInstallationForProviderManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_sentryappinstallation"

    # Used when first creating an Installation to tell the serializer that the
    # grant code should be included in the serialization.
    is_new = False

    def to_dict(self):
        """Serialize every concrete/private/m2m field into a plain dict."""
        opts = self._meta
        data = {}
        for field in chain(opts.concrete_fields, opts.private_fields, opts.many_to_many):
            field_name = field.get_attname()
            data[field_name] = self.serializable_value(field_name)
        return data

    def save(self, *args, **kwargs):
        # Touch date_updated on every save.
        self.date_updated = timezone.now()
        return super().save(*args, **kwargs)

    def prepare_sentry_app_components(self, component_type, project=None, values=None):
        """
        Find this app's UI component of ``component_type`` and prepare it.

        Returns None when the app declares no such component.
        """
        from sentry.models import SentryAppComponent

        try:
            component = SentryAppComponent.objects.get(
                sentry_app_id=self.sentry_app_id, type=component_type
            )
        except SentryAppComponent.DoesNotExist:
            return None

        return self.prepare_ui_component(component, project, values)

    def prepare_ui_component(self, component, project=None, values=None):
        """
        Run the component preparer for this installation.

        Returns the prepared component, or None when the preparer's upstream
        API requests fail (the UI then simply hides the component).
        """
        from sentry.coreapi import APIError
        from sentry.mediators import sentry_app_components

        if values is None:
            values = []
        try:
            sentry_app_components.Preparer.run(
                component=component, install=self, project=project, values=values
            )
            return component
        except APIError:
            # TODO(nisanthan): For now, skip showing the UI Component if the API requests fail
            return None
class File(Model):
    """
    A stored file assembled from one or more deduplicated FileBlobs, ordered
    via FileBlobIndex (offset-based chunking).
    """
    __core__ = False

    name = models.CharField(max_length=128)
    type = models.CharField(max_length=64)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    headers = JSONField()
    blobs = models.ManyToManyField('sentry.FileBlob', through='sentry.FileBlobIndex')
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, null=True, db_index=True)

    # <Legacy fields>
    # Remove in 8.1
    blob = FlexibleForeignKey('sentry.FileBlob', null=True, related_name='legacy_blob')
    path = models.TextField(null=True)
    # </Legacy fields>

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_file'

    def _get_chunked_blob(self, mode=None, prefetch=False, prefetch_to=None, delete=True):
        # Wrap this file's blob indexes (ordered by offset) in a file-like
        # object that stitches the chunks back together.
        return ChunkedFileBlobIndexWrapper(
            FileBlobIndex.objects.filter(
                file=self,
            ).select_related('blob').order_by('offset'),
            mode=mode,
            prefetch=prefetch,
            prefetch_to=prefetch_to,
            delete=delete
        )

    def getfile(self, mode=None, prefetch=False, as_tempfile=False):
        """Returns a file object.  By default the file is fetched on
        demand but if prefetch is enabled the file is fully
        prefetched into a tempfile before reading can happen.

        Additionally if `as_tempfile` is passed a NamedTemporaryFile is
        returned instead which can help in certain situations where a
        tempfile is necessary.
        """
        if as_tempfile:
            prefetch = True
        impl = self._get_chunked_blob(mode, prefetch)
        if as_tempfile:
            return impl.detach_tempfile()
        return FileObj(impl, self.name)

    def save_to(self, path):
        """Fetches the file and emplaces it at a certain location.  The
        write is done atomically to a tempfile first and then moved over.
        If the directory does not exist it is created.
        """
        path = os.path.abspath(path)
        base = os.path.dirname(path)
        try:
            os.makedirs(base)
        except OSError:
            # Directory already exists (or creation failed; the write below
            # will surface a real error).
            pass

        f = None
        try:
            # Prefetch into a tempfile on the destination filesystem so the
            # rename below is atomic.
            f = self._get_chunked_blob(
                prefetch=True,
                prefetch_to=base,
                delete=False
            ).detach_tempfile()
            os.rename(f.name, path)
            f.close()
            f = None
        finally:
            if f is not None:
                # Rename did not happen; clean up the tempfile.
                f.close()
                try:
                    os.remove(f.name)
                except Exception:
                    pass

    def putfile(self, fileobj, blob_size=DEFAULT_BLOB_SIZE, commit=True):
        """
        Save a fileobj into a number of chunks.

        Returns a list of `FileBlobIndex` items.

        >>> indexes = file.putfile(fileobj)
        """
        results = []
        offset = 0
        checksum = sha1(b'')

        while True:
            contents = fileobj.read(blob_size)
            if not contents:
                break
            checksum.update(contents)

            blob_fileobj = ContentFile(contents)
            blob = FileBlob.from_file(blob_fileobj)

            results.append(FileBlobIndex.objects.create(
                file=self,
                blob=blob,
                offset=offset,
            ))
            offset += blob.size
        self.size = offset
        self.checksum = checksum.hexdigest()
        metrics.timing('filestore.file-size', offset)
        if commit:
            self.save()
        return results

    def assemble_from_file_blob_ids(self, file_blob_ids, checksum, commit=True):
        """
        This creates a file, from file blobs and returns a temp file with the
        contents.

        Raises AssembleChecksumMismatch when the concatenated content does not
        hash to ``checksum``. Callers must close the returned tempfile.
        """
        tf = tempfile.NamedTemporaryFile()
        with transaction.atomic():
            file_blobs = FileBlob.objects.filter(id__in=file_blob_ids).all()
            # Make sure the blobs are sorted with the order provided
            file_blobs = sorted(file_blobs, key=lambda blob: file_blob_ids.index(blob.id))
            new_checksum = sha1(b'')
            offset = 0
            for blob in file_blobs:
                FileBlobIndex.objects.create(
                    file=self,
                    blob=blob,
                    offset=offset,
                )
                for chunk in blob.getfile().chunks():
                    new_checksum.update(chunk)
                    tf.write(chunk)
                offset += blob.size

            self.size = offset
            self.checksum = new_checksum.hexdigest()

            if checksum != self.checksum:
                raise AssembleChecksumMismatch('Checksum mismatch')

        metrics.timing('filestore.file-size', offset)
        if commit:
            self.save()
        tf.flush()
        tf.seek(0)
        return tf
class OrganizationMember(Model):
    """
    Identifies relationships between teams and users.

    Users listed as team members are considered to have access to all projects
    and could be thought of as team owners (though their access level may not)
    be set to ownership.
    """
    organization = FlexibleForeignKey('sentry.Organization', related_name="member_set")

    # Either `user` or `email` must be set; an email-only row is a pending
    # invite (see `is_pending`).
    user = FlexibleForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True,
                              related_name="sentry_orgmember_set")
    email = models.EmailField(null=True, blank=True)
    type = BoundedPositiveIntegerField(choices=(
        (OrganizationMemberType.BOT, _('Bot')),
        (OrganizationMemberType.MEMBER, _('Member')),
        (OrganizationMemberType.ADMIN, _('Admin')),
        (OrganizationMemberType.OWNER, _('Owner')),
    ), default=OrganizationMemberType.MEMBER)
    flags = BitField(flags=(
        ('sso:linked', 'sso:linked'),
        ('sso:invalid', 'sso:invalid'),
    ), default=0)
    date_added = models.DateTimeField(default=timezone.now)
    has_global_access = models.BooleanField(default=True)
    # 1-based position of this member within the organization; maintained by
    # _set_counter/_unshift_counter on save/delete.
    counter = BoundedPositiveIntegerField(null=True, blank=True)
    teams = models.ManyToManyField('sentry.Team', blank=True,
                                   through='sentry.OrganizationMemberTeam')

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organizationmember'
        unique_together = (
            ('organization', 'user'),
            ('organization', 'email'),
        )

    __repr__ = sane_repr('organization_id', 'user_id', 'type')

    @transaction.atomic
    def save(self, *args, **kwargs):
        # A member must be addressable either as a user account or by email.
        assert self.user_id or self.email, \
            'Must set user or email'
        super(OrganizationMember, self).save(*args, **kwargs)
        if not self.counter:
            self._set_counter()

    @transaction.atomic
    def delete(self, *args, **kwargs):
        super(OrganizationMember, self).delete(*args, **kwargs)
        if self.counter:
            self._unshift_counter()

    def _unshift_counter(self):
        # Close the gap left by this member: shift every later counter down.
        assert self.counter
        OrganizationMember.objects.filter(
            organization=self.organization,
            counter__gt=self.counter,
        ).update(counter=F('counter') - 1, )

    def _set_counter(self):
        assert self.id and not self.counter
        # XXX(dcramer): this isnt atomic, but unfortunately MySQL doesnt
        # support the subquery pattern we'd need
        self.update(counter=OrganizationMember.objects.filter(
            organization=self.organization,
        ).count(), )

    @property
    def is_pending(self):
        # An invite that has not been accepted yet (no user bound).
        return self.user_id is None

    @property
    def token(self):
        """Deterministic token used to validate invite-acceptance links."""
        checksum = md5()
        for x in (str(self.organization_id), self.get_email(), settings.SECRET_KEY):
            checksum.update(x)
        return checksum.hexdigest()

    def get_scopes(self):
        """Return API scopes granted by this member's type and access flags.

        Type values are ordered, so `<=` means "at least this privileged".
        """
        scopes = []
        if self.type <= OrganizationMemberType.MEMBER:
            scopes.extend([
                'event:read',
                'event:write',
                'event:delete',
                'org:read',
                'project:read',
                'team:read',
                'member:read',
            ])
        if self.type <= OrganizationMemberType.ADMIN:
            scopes.extend(['project:write', 'team:write'])
        if self.type <= OrganizationMemberType.OWNER:
            scopes.extend(['project:delete', 'team:delete'])
        if self.has_global_access:
            # Org-wide scopes are only granted to members with global access.
            if self.type <= OrganizationMemberType.ADMIN:
                scopes.extend(['org:write', 'member:write'])
            if self.type <= OrganizationMemberType.OWNER:
                scopes.extend(['org:delete', 'member:delete'])
        return scopes

    def send_invite_email(self):
        """Email this member an invitation link (best-effort; errors logged)."""
        from sentry.utils.email import MessageBuilder

        context = {
            'email': self.email,
            'organization': self.organization,
            'url': absolute_uri(
                reverse('sentry-accept-invite', kwargs={
                    'member_id': self.id,
                    'token': self.token,
                })),
        }

        msg = MessageBuilder(
            subject='Invite to join organization: %s' % (self.organization.name, ),
            template='sentry/emails/member_invite.txt',
            context=context,
        )

        try:
            msg.send([self.get_email()])
        except Exception as e:
            # Deliberately best-effort: a mail failure must not break the
            # surrounding flow.
            logger = logging.getLogger('sentry.mail.errors')
            logger.exception(e)

    def send_sso_link_email(self):
        """Email this member a link to bind their SSO identity (best-effort)."""
        from sentry.utils.email import MessageBuilder

        context = {
            'email': self.email,
            'organization_name': self.organization.name,
            'url': absolute_uri(
                reverse('sentry-auth-link-identity', kwargs={
                    'organization_slug': self.organization.slug,
                })),
        }

        msg = MessageBuilder(
            subject='Action Required for %s' % (self.organization.name, ),
            template='sentry/emails/auth-link-identity.txt',
            html_template='sentry/emails/auth-link-identity.html',
            context=context,
        )

        try:
            msg.send([self.get_email()])
        except Exception as e:
            logger = logging.getLogger('sentry.mail.errors')
            logger.exception(e)

    def get_display_name(self):
        # Prefer the bound user's display name; fall back to the invite email.
        if self.user_id:
            return self.user.get_display_name()
        return self.email

    def get_email(self):
        # Prefer the bound user's email; fall back to the invite email.
        if self.user_id:
            return self.user.email
        return self.email

    def get_audit_log_data(self):
        """Snapshot of member state recorded in audit log entries."""
        return {
            'email': self.email,
            'user': self.user_id,
            'teams': [t.id for t in self.get_teams()],
            'has_global_access': self.has_global_access,
        }

    def get_teams(self):
        """Teams this member can access.

        Global-access members see every org team except ones they explicitly
        opted out of (is_active=False); others see only active memberships.
        """
        from sentry.models import Team

        if self.has_global_access:
            return Team.objects.filter(
                organization=self.organization,
            ).exclude(
                id__in=OrganizationMemberTeam.objects.filter(
                    organizationmember=self,
                    is_active=False,
                ).values('team'))
        return Team.objects.filter(
            id__in=OrganizationMemberTeam.objects.filter(
                organizationmember=self,
                is_active=True,
            ).values('team'))
class ReleaseProjectEnvironment(Model):
    """
    Per (project, release, environment) tracking row.

    Records when the release was first/last seen in that environment, how
    many new issues it introduced, and its adoption lifecycle via the
    `adopted`/`unadopted` timestamps.
    """

    __include_in_export__ = False

    release = FlexibleForeignKey("sentry.Release")
    project = FlexibleForeignKey("sentry.Project")
    environment = FlexibleForeignKey("sentry.Environment")
    new_issues_count = BoundedPositiveIntegerField(default=0)
    first_seen = models.DateTimeField(default=timezone.now)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    last_deploy_id = BoundedPositiveIntegerField(null=True, db_index=True)
    adopted = models.DateTimeField(null=True, blank=True)
    unadopted = models.DateTimeField(null=True, blank=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_releaseprojectenvironment"
        index_together = (
            ("project", "adopted", "environment"),
            ("project", "unadopted", "environment"),
        )
        unique_together = (("project", "release", "environment"),)

    __repr__ = sane_repr("project", "release", "environment")

    @classmethod
    def get_cache_key(cls, release_id, project_id, environment_id):
        """Cache key for the (release, project, environment) triple."""
        return f"releaseprojectenv:{release_id}:{project_id}:{environment_id}"

    @classmethod
    def get_or_create(cls, release, project, environment, datetime, **kwargs):
        """Fetch or create the row for this triple, timing the operation.

        `datetime` is used for `first_seen`/`last_seen` on creation and to
        bump `last_seen` on subsequent calls.
        """
        with metrics.timer("models.releaseprojectenvironment.get_or_create") as metrics_tags:
            return cls._get_or_create_impl(
                release, project, environment, datetime, metrics_tags, **kwargs
            )

    @classmethod
    def _get_or_create_impl(cls, release, project, environment, datetime, metrics_tags, **kwargs):
        # Fix: ids were previously passed as (project.id, release.id, ...),
        # contradicting get_cache_key's (release_id, project_id, ...) signature.
        # Pass them in signature order so every caller of get_cache_key agrees
        # on the key layout.
        cache_key = cls.get_cache_key(release.id, project.id, environment.id)

        instance = cache.get(cache_key)
        if instance is None:
            metrics_tags["cache_hit"] = "false"
            instance, created = cls.objects.get_or_create(
                release=release,
                project=project,
                environment=environment,
                defaults={"first_seen": datetime, "last_seen": datetime},
            )
            cache.set(cache_key, instance, 3600)
        else:
            # Fix: this tag was previously set on the miss branch (right after
            # cache.set), so misses reported "false" then "true" and real hits
            # were never tagged. A cache hit also means the row already exists.
            metrics_tags["cache_hit"] = "true"
            created = False

        metrics_tags["created"] = "true" if created else "false"

        # Same as the ReleaseEnvironment model: throttle last_seen updates to
        # at most once per minute to avoid a DB write per event.
        if not created and instance.last_seen < datetime - timedelta(seconds=60):
            # The last_seen__lt filter guards against racing writers moving
            # the timestamp backwards.
            cls.objects.filter(
                id=instance.id, last_seen__lt=datetime - timedelta(seconds=60)
            ).update(last_seen=datetime)
            instance.last_seen = datetime
            cache.set(cache_key, instance, 3600)
            metrics_tags["bumped"] = "true"
        else:
            metrics_tags["bumped"] = "false"

        return instance

    @property
    def adoption_stages(self):
        """Return the adoption stage plus the raw adopted/unadopted timestamps.

        adopted only        -> ADOPTED
        adopted + unadopted -> REPLACED
        neither             -> LOW_ADOPTION
        """
        if self.adopted is not None and self.unadopted is None:
            stage = ReleaseStages.ADOPTED
        elif self.adopted is not None and self.unadopted is not None:
            stage = ReleaseStages.REPLACED
        else:
            stage = ReleaseStages.LOW_ADOPTION

        return {"stage": stage, "adopted": self.adopted, "unadopted": self.unadopted}
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # Stored in the legacy 'view' column.
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    # Grouping hash (md5 hex, 32 chars) — unique together with project.
    checksum = models.CharField(max_length=32, db_index=True)
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.MUTED, _('Muted')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        unique_together = (('project', 'checksum'), )
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )

    __repr__ = sane_repr('project_id', 'checksum')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill timestamps so rows created without them stay consistent:
        # first_seen defaults to last_seen, active_at to first_seen.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        if self.message:
            # We limit what we store for the message body
            self.message = self.message.splitlines()[0][:255]
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-group', args=[self.organization.slug, self.project.slug, self.id]))

    @property
    def avg_time_spent(self):
        # Returns None when no samples were recorded (avoids ZeroDivisionError).
        if not self.time_spent_count:
            return
        return float(self.time_spent_total) / self.time_spent_count

    def natural_key(self):
        return (self.project, self.checksum)

    def is_over_resolve_age(self):
        """True when the project's auto-resolve window (hours) has elapsed."""
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_muted(self):
        return self.get_status() == GroupStatus.MUTED

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        # An unresolved group past the auto-resolve age is reported resolved
        # even though the stored status has not been updated.
        if self.status == GroupStatus.UNRESOLVED and self.is_over_resolve_age(
        ):
            return GroupStatus.RESOLVED
        return self.status

    def get_score(self):
        # Ranking score: frequency (log-scaled) weighted against recency.
        return int(
            math.log(self.times_seen) * 600 + float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        """Return the most recent Event for this group (cached per instance)."""
        from sentry.models import Event

        if not hasattr(self, '_latest_event'):
            try:
                self._latest_event = Event.objects.filter(
                    group=self,
                ).order_by('-datetime')[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_unique_tags(self, tag, since=None, order_by='-times_seen'):
        # TODO(dcramer): this has zero test coverage and is a critical path
        from sentry.models import GroupTagValue

        queryset = GroupTagValue.objects.filter(
            group=self,
            key=tag,
        )
        if since:
            queryset = queryset.filter(last_seen__gte=since)
        return queryset.values_list(
            'value',
            'times_seen',
            'first_seen',
            'last_seen',
        ).order_by(order_by)

    def get_tags(self, with_internal=True):
        """Return [{'key', 'label'}] for this group's tags, sorted by label.

        Cached per instance; `with_internal=False` hides 'sentry:' tags.
        """
        from sentry.models import GroupTagKey, TagKey

        if not hasattr(self, '_tag_cache'):
            group_tags = GroupTagKey.objects.filter(
                group=self,
                project=self.project,
            )
            if not with_internal:
                group_tags = group_tags.exclude(key__startswith='sentry:')

            group_tags = list(group_tags.values_list('key', flat=True))

            tag_keys = dict(
                (t.key, t)
                for t in TagKey.objects.filter(project=self.project, key__in=group_tags))

            results = []
            for key in group_tags:
                try:
                    tag_key = tag_keys[key]
                except KeyError:
                    # No TagKey row: derive a human label from the raw key.
                    label = key.replace('_', ' ').title()
                else:
                    label = tag_key.get_label()

                results.append({
                    'key': key,
                    'label': label,
                })

            self._tag_cache = sorted(results, key=lambda x: x['label'])

        return self._tag_cache

    def error(self):
        return self.message

    error.short_description = _('error')

    def has_two_part_message(self):
        message = strip(self.message)
        return '\n' in message or len(message) > 100

    @property
    def title(self):
        # Prefer the culprit (offending code location) over the raw message.
        culprit = strip(self.culprit)
        if culprit:
            return culprit
        return self.message

    @property
    def message_short(self):
        # First line of the message, truncated to 100 chars for display.
        message = strip(self.message)
        if not message:
            message = '<unlabeled message>'
        else:
            message = truncatechars(message.splitlines()[0], 100)
        return message

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    def get_email_subject(self):
        # NOTE(review): the .encode('utf-8') calls assume Python 2 str
        # formatting; under Python 3 they would render as b'...' — verify
        # before porting.
        return '[%s %s] %s: %s' % (
            self.team.name.encode('utf-8'), self.project.name.encode('utf-8'),
            six.text_type(self.get_level_display()).upper().encode('utf-8'),
            self.message_short.encode('utf-8'))
class Organization(Model):
    """
    An organization represents a group of individuals which maintain ownership of projects.
    """

    __core__ = True

    name = models.CharField(max_length=64)
    slug = models.SlugField(unique=True)
    status = BoundedPositiveIntegerField(
        choices=OrganizationStatus.as_choices(), default=OrganizationStatus.ACTIVE.value
    )
    date_added = models.DateTimeField(default=timezone.now)
    members = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        through="sentry.OrganizationMember",
        related_name="org_memberships",
        through_fields=("organization", "user"),
    )
    default_role = models.CharField(max_length=32, default=str(roles.get_default().id))

    flags = BitField(
        flags=(
            (
                "allow_joinleave",
                "Allow members to join and leave teams without requiring approval.",
            ),
            (
                "enhanced_privacy",
                "Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.",
            ),
            (
                "disable_shared_issues",
                "Disable sharing of limited details on issues to anonymous users.",
            ),
            (
                "early_adopter",
                "Enable early adopter status, gaining access to features prior to public release.",
            ),
            ("require_2fa", "Require and enforce two-factor authentication for all members."),
            (
                "disable_new_visibility_features",
                "Temporarily opt out of new visibility features and ui",
            ),
        ),
        # allow_joinleave is on by default
        default=1,
    )

    objects = OrganizationManager(cache_fields=("pk", "slug"))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_organization"

    __repr__ = sane_repr("owner_id", "name", "slug")

    @classmethod
    def get_default(cls):
        """
        Return the organization used in single organization mode.
        """
        if settings.SENTRY_ORGANIZATION is not None:
            return cls.objects.get(id=settings.SENTRY_ORGANIZATION)
        return cls.objects.filter(status=OrganizationStatus.ACTIVE)[0]

    def __str__(self):
        return f"{self.name} ({self.slug})"

    def save(self, *args, **kwargs):
        if not self.slug:
            # Slug generation races with concurrent saves; serialize with a
            # short-lived distributed lock.
            lock = locks.get("slug:organization", duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, reserved=RESERVED_ORGANIZATION_SLUGS)
            super().save(*args, **kwargs)
        else:
            super().save(*args, **kwargs)

    def delete(self, **kwargs):
        from sentry.models import NotificationSetting

        if self.is_default:
            # Fix: message previously read "the the default organization".
            raise Exception("You cannot delete the default organization.")

        # There is no foreign key relationship so we have to manually cascade.
        NotificationSetting.objects.remove_for_organization(self)

        return super().delete(**kwargs)

    @cached_property
    def is_default(self):
        if not settings.SENTRY_SINGLE_ORGANIZATION:
            return False

        return self == type(self).get_default()

    def has_access(self, user, access=None):
        """True when `user` is a member (optionally at least `access` level)."""
        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        return queryset.exists()

    def get_audit_log_data(self):
        """Snapshot of org state recorded in audit log entries."""
        return {
            "id": self.id,
            "slug": self.slug,
            "name": self.name,
            "status": int(self.status),
            "flags": int(self.flags),
            "default_role": self.default_role,
        }

    def get_owners(self):
        from sentry.models import User

        return User.objects.filter(
            sentry_orgmember_set__role=roles.get_top_dog().id,
            sentry_orgmember_set__organization=self,
            is_active=True,
        )

    def get_default_owner(self):
        if not hasattr(self, "_default_owner"):
            self._default_owner = self.get_owners()[0]
        return self._default_owner

    def has_single_owner(self):
        from sentry.models import OrganizationMember

        # [:2] keeps the count query cheap — we only care about "exactly one".
        count = OrganizationMember.objects.filter(
            organization=self, role=roles.get_top_dog().id, user__isnull=False,
            user__is_active=True
        )[:2].count()
        return count == 1

    def merge_to(from_org, to_org):
        """Fold `from_org` into `to_org`, migrating members, teams, projects,
        releases and related org-scoped rows.

        NOTE: declared without `self`; the instance slot acts as `from_org`,
        so this is invoked as ``from_org.merge_to(to_org)``.
        """
        from sentry.models import (
            ApiKey,
            AuditLogEntry,
            AuthProvider,
            Commit,
            Environment,
            OrganizationAvatar,
            OrganizationIntegration,
            OrganizationMember,
            OrganizationMemberTeam,
            Project,
            Release,
            ReleaseCommit,
            ReleaseEnvironment,
            ReleaseFile,
            ReleaseHeadCommit,
            Repository,
            Team,
        )

        # Fix: the logger was previously bound inside the member loop, so an
        # organization with no members raised NameError in the later loops.
        logger = logging.getLogger("sentry.merge")

        for from_member in OrganizationMember.objects.filter(
            organization=from_org, user__isnull=False
        ):
            try:
                to_member = OrganizationMember.objects.get(
                    organization=to_org, user=from_member.user
                )
            except OrganizationMember.DoesNotExist:
                # User only exists in the source org: move the membership over.
                from_member.update(organization=to_org)
                to_member = from_member
            else:
                # User is in both orgs: carry the active team memberships across.
                qs = OrganizationMemberTeam.objects.filter(
                    organizationmember=from_member, is_active=True
                ).select_related()
                for omt in qs:
                    OrganizationMemberTeam.objects.create_or_update(
                        organizationmember=to_member, team=omt.team, defaults={"is_active": True}
                    )

            logger.info(
                "user.migrate",
                extra={
                    "instance_id": from_member.id,
                    "new_member_id": to_member.id,
                    "from_organization_id": from_org.id,
                    "to_organization_id": to_org.id,
                },
            )

        for from_team in Team.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    from_team.update(organization=to_org)
            except IntegrityError:
                # Slug collision in the target org: re-slugify and retry.
                slugify_instance(from_team, from_team.name, organization=to_org)
                from_team.update(organization=to_org, slug=from_team.slug)
            logger.info(
                "team.migrate",
                extra={
                    "instance_id": from_team.id,
                    "new_slug": from_team.slug,
                    "from_organization_id": from_org.id,
                    "to_organization_id": to_org.id,
                },
            )

        for from_project in Project.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    from_project.update(organization=to_org)
            except IntegrityError:
                # Slug collision in the target org: re-slugify and retry.
                slugify_instance(
                    from_project,
                    from_project.name,
                    organization=to_org,
                    reserved=RESERVED_PROJECT_SLUGS,
                )
                from_project.update(organization=to_org, slug=from_project.slug)
            logger.info(
                "project.migrate",
                extra={
                    "instance_id": from_project.id,
                    "new_slug": from_project.slug,
                    "from_organization_id": from_org.id,
                    "to_organization_id": to_org.id,
                },
            )

        # TODO(jess): update this when adding unique constraint
        # on version, organization for releases
        for from_release in Release.objects.filter(organization=from_org):
            try:
                to_release = Release.objects.get(version=from_release.version, organization=to_org)
            except Release.DoesNotExist:
                Release.objects.filter(id=from_release.id).update(organization=to_org)
            else:
                # Same version exists in both orgs: merge the rows.
                Release.merge(to_release, [from_release])
            logger.info(
                "release.migrate",
                extra={
                    "instance_id": from_release.id,
                    "from_organization_id": from_org.id,
                    "to_organization_id": to_org.id,
                },
            )

        def do_update(queryset, params):
            # Bulk-update first; on a uniqueness conflict fall back to
            # row-by-row updates, skipping (and logging) the conflicting rows.
            model_name = queryset.model.__name__.lower()
            try:
                with transaction.atomic():
                    queryset.update(**params)
            except IntegrityError:
                for instance in queryset:
                    try:
                        with transaction.atomic():
                            instance.update(**params)
                    except IntegrityError:
                        logger.info(
                            f"{model_name}.migrate-skipped",
                            extra={
                                "from_organization_id": from_org.id,
                                "to_organization_id": to_org.id,
                            },
                        )
                    else:
                        logger.info(
                            f"{model_name}.migrate",
                            extra={
                                "instance_id": instance.id,
                                "from_organization_id": from_org.id,
                                "to_organization_id": to_org.id,
                            },
                        )
            else:
                logger.info(
                    f"{model_name}.migrate",
                    extra={"from_organization_id": from_org.id, "to_organization_id": to_org.id},
                )

        # Models keyed via a FK instance vs. a raw organization_id column.
        INST_MODEL_LIST = (
            AuthProvider,
            ApiKey,
            AuditLogEntry,
            OrganizationAvatar,
            OrganizationIntegration,
            ReleaseEnvironment,
            ReleaseFile,
        )
        ATTR_MODEL_LIST = (Commit, ReleaseCommit, ReleaseHeadCommit, Repository, Environment)

        for model in INST_MODEL_LIST:
            queryset = model.objects.filter(organization=from_org)
            do_update(queryset, {"organization": to_org})

        for model in ATTR_MODEL_LIST:
            queryset = model.objects.filter(organization_id=from_org.id)
            do_update(queryset, {"organization_id": to_org.id})

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.unset_value(self, *args, **kwargs)

    def send_delete_confirmation(self, audit_log_entry, countdown):
        """Email all owners that deletion is queued, with a restore link."""
        from sentry import options
        from sentry.utils.email import MessageBuilder

        owners = self.get_owners()

        context = {
            "organization": self,
            "audit_log_entry": audit_log_entry,
            "eta": timezone.now() + timedelta(seconds=countdown),
            "url": absolute_uri(reverse("sentry-restore-organization", args=[self.slug])),
        }

        MessageBuilder(
            subject="{}Organization Queued for Deletion".format(options.get("mail.subject-prefix")),
            template="sentry/emails/org_delete_confirm.txt",
            html_template="sentry/emails/org_delete_confirm.html",
            type="org.confirm_delete",
            context=context,
        ).send_async([o.email for o in owners])

    def handle_2fa_required(self, request):
        """Kick off async removal of members without 2FA, attributing the
        actor (user or API key) and IP from the triggering request."""
        from sentry.models import ApiKey
        from sentry.tasks.auth import remove_2fa_non_compliant_members

        actor_id = request.user.id if request.user and request.user.is_authenticated() else None
        api_key_id = (
            request.auth.id if hasattr(request, "auth") and isinstance(request.auth, ApiKey) else None
        )
        ip_address = request.META["REMOTE_ADDR"]

        remove_2fa_non_compliant_members.delay(
            self.id, actor_id=actor_id, actor_key_id=api_key_id, ip_address=ip_address
        )

    def get_url_viewname(self):
        return "sentry-organization-issue-list"

    def get_url(self):
        return reverse(self.get_url_viewname(), args=[self.slug])
class Team(Model):
    """
    A team represents a group of individuals which maintain ownership of projects.
    """

    __core__ = True

    organization = FlexibleForeignKey("sentry.Organization")
    slug = models.SlugField()
    name = models.CharField(max_length=64)
    status = BoundedPositiveIntegerField(
        choices=(
            (TeamStatus.VISIBLE, _("Active")),
            (TeamStatus.PENDING_DELETION, _("Pending Deletion")),
            (TeamStatus.DELETION_IN_PROGRESS, _("Deletion in Progress")),
        ),
        default=TeamStatus.VISIBLE,
    )
    date_added = models.DateTimeField(default=timezone.now, null=True)

    objects = TeamManager(cache_fields=("pk", "slug"))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_team"
        unique_together = (("organization", "slug"),)

    __repr__ = sane_repr("name", "slug")

    def __unicode__(self):
        return "%s (%s)" % (self.name, self.slug)

    def save(self, *args, **kwargs):
        if not self.slug:
            # Slug generation races with concurrent saves; serialize with a
            # short-lived distributed lock (slug is unique per organization).
            lock = locks.get("slug:team", duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, organization=self.organization)
            super(Team, self).save(*args, **kwargs)
        else:
            super(Team, self).save(*args, **kwargs)

    @property
    def member_set(self):
        # Active users whose membership on this team is active.
        return self.organization.member_set.filter(
            organizationmemberteam__team=self,
            organizationmemberteam__is_active=True,
            user__is_active=True,
        ).distinct()

    def has_access(self, user, access=None):
        """Deprecated access check; prefer org-level access helpers.

        Returns True when `user` is an active member (optionally at least
        `access` level) and their SSO identity, if any, is still valid.
        """
        from sentry.models import AuthIdentity, OrganizationMember

        warnings.warn("Team.has_access is deprecated.", DeprecationWarning)

        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        try:
            member = queryset.get()
        except OrganizationMember.DoesNotExist:
            return False

        try:
            auth_identity = AuthIdentity.objects.get(
                auth_provider__organization=self.organization_id, user=member.user_id
            )
        except AuthIdentity.DoesNotExist:
            # No SSO configured for this user: membership alone grants access.
            return True

        return auth_identity.is_valid(member)

    def transfer_to(self, organization):
        """
        Transfers a team and all projects under it to the given organization.
        """
        from sentry.models import (
            OrganizationAccessRequest,
            OrganizationMember,
            OrganizationMemberTeam,
            Project,
            ProjectTeam,
            ReleaseProject,
            ReleaseProjectEnvironment,
        )

        try:
            with transaction.atomic():
                self.update(organization=organization)
        except IntegrityError:
            # likely this means a team already exists, let's try to coerce to
            # it instead of a blind transfer
            new_team = Team.objects.get(organization=organization, slug=self.slug)
        else:
            new_team = self

        # Projects still pointing at the old organization that ride along.
        project_ids = list(
            Project.objects.filter(teams=self)
            .exclude(organization=organization)
            .values_list("id", flat=True)
        )

        # remove associations with releases from other org
        ReleaseProject.objects.filter(project_id__in=project_ids).delete()
        ReleaseProjectEnvironment.objects.filter(project_id__in=project_ids).delete()

        Project.objects.filter(id__in=project_ids).update(organization=organization)

        ProjectTeam.objects.filter(project_id__in=project_ids).update(team=new_team)

        # remove any pending access requests from the old organization
        if self != new_team:
            OrganizationAccessRequest.objects.filter(team=self).delete()

        # identify shared members and ensure they retain team access
        # under the new organization
        old_memberships = OrganizationMember.objects.filter(teams=self).exclude(
            organization=organization
        )
        for member in old_memberships:
            try:
                new_member = OrganizationMember.objects.get(
                    user=member.user, organization=organization
                )
            except OrganizationMember.DoesNotExist:
                # User has no membership in the target org: nothing to carry over.
                continue

            try:
                with transaction.atomic():
                    OrganizationMemberTeam.objects.create(
                        team=new_team, organizationmember=new_member
                    )
            except IntegrityError:
                # Membership link already exists.
                pass

        # Drop team links for members not in the target organization.
        OrganizationMemberTeam.objects.filter(team=self).exclude(
            organizationmember__organization=organization
        ).delete()

        if new_team != self:
            cursor = connections[router.db_for_write(Team)].cursor()
            # we use a cursor here to avoid automatic cascading of relations
            # in Django
            try:
                cursor.execute("DELETE FROM sentry_team WHERE id = %s", [self.id])
            finally:
                cursor.close()

    def get_audit_log_data(self):
        """Snapshot of team state recorded in audit log entries."""
        return {"id": self.id, "slug": self.slug, "name": self.name, "status": self.status}
class Release(Model): """ A release is generally created when a new version is pushed into a production state. A commit is generally a git commit. See also releasecommit.py """ __core__ = False organization = FlexibleForeignKey("sentry.Organization") projects = models.ManyToManyField( "sentry.Project", related_name="releases", through=ReleaseProject ) status = BoundedPositiveIntegerField( default=ReleaseStatus.OPEN, null=True, choices=( (ReleaseStatus.OPEN, _("Open")), (ReleaseStatus.ARCHIVED, _("Archived")), ), ) # DEPRECATED project_id = BoundedPositiveIntegerField(null=True) version = models.CharField(max_length=DB_VERSION_LENGTH) # ref might be the branch name being released ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True) url = models.URLField(null=True, blank=True) date_added = models.DateTimeField(default=timezone.now) # DEPRECATED - not available in UI or editable from API date_started = models.DateTimeField(null=True, blank=True) date_released = models.DateTimeField(null=True, blank=True) # arbitrary data recorded with the release data = JSONField(default={}) # new issues (groups) that arise as a consequence of this release new_groups = BoundedPositiveIntegerField(default=0) # generally the release manager, or the person initiating the process owner = FlexibleForeignKey("sentry.User", null=True, blank=True, on_delete=models.SET_NULL) # materialized stats commit_count = BoundedPositiveIntegerField(null=True, default=0) last_commit_id = BoundedBigIntegerField(null=True) authors = ArrayField(null=True) total_deploys = BoundedPositiveIntegerField(null=True, default=0) last_deploy_id = BoundedPositiveIntegerField(null=True) # HACK HACK HACK # As a transitionary step we permit release rows to exist multiple times # where they are "specialized" for a specific project. The goal is to # later split up releases by project again. This is for instance used # by the org release listing. 
_for_project_id = None class Meta: app_label = "sentry" db_table = "sentry_release" unique_together = (("organization", "version"),) __repr__ = sane_repr("organization_id", "version") def __eq__(self, other): """Make sure that specialized releases are only comparable to the same other specialized release. This for instance lets us treat them separately for serialization purposes. """ return Model.__eq__(self, other) and self._for_project_id == other._for_project_id @staticmethod def is_valid_version(value): return not ( not value or any(c in value for c in BAD_RELEASE_CHARS) or value in (".", "..") or value.lower() == "latest" ) @classmethod def get_cache_key(cls, organization_id, version): return "release:3:%s:%s" % (organization_id, md5_text(version).hexdigest()) @classmethod def get_lock_key(cls, organization_id, release_id): return "releasecommits:{}:{}".format(organization_id, release_id) @classmethod def get(cls, project, version): cache_key = cls.get_cache_key(project.organization_id, version) release = cache.get(cache_key) if release is None: try: release = cls.objects.get( organization_id=project.organization_id, projects=project, version=version ) except cls.DoesNotExist: release = -1 cache.set(cache_key, release, 300) if release == -1: return return release @classmethod def get_or_create(cls, project, version, date_added=None): with metrics.timer("models.release.get_or_create") as metric_tags: return cls._get_or_create_impl(project, version, date_added, metric_tags) @classmethod def _get_or_create_impl(cls, project, version, date_added, metric_tags): from sentry.models import Project if date_added is None: date_added = timezone.now() cache_key = cls.get_cache_key(project.organization_id, version) release = cache.get(cache_key) if release in (None, -1): # TODO(dcramer): if the cache result is -1 we could attempt a # default create here instead of default get project_version = ("%s-%s" % (project.slug, version))[:DB_VERSION_LENGTH] releases = list( 
cls.objects.filter( organization_id=project.organization_id, version__in=[version, project_version], projects=project, ) ) if releases: try: release = [r for r in releases if r.version == project_version][0] except IndexError: release = releases[0] metric_tags["created"] = "false" else: try: with transaction.atomic(): release = cls.objects.create( organization_id=project.organization_id, version=version, date_added=date_added, total_deploys=0, ) metric_tags["created"] = "true" except IntegrityError: metric_tags["created"] = "false" release = cls.objects.get( organization_id=project.organization_id, version=version ) release.add_project(project) if not project.flags.has_releases: project.flags.has_releases = True project.update(flags=F("flags").bitor(Project.flags.has_releases)) # TODO(dcramer): upon creating a new release, check if it should be # the new "latest release" for this project cache.set(cache_key, release, 3600) metric_tags["cache_hit"] = "false" else: metric_tags["cache_hit"] = "true" return release @cached_property def version_info(self): try: return parse_release(self.version) except RelayError: # This can happen on invalid legacy releases return None @classmethod def merge(cls, to_release, from_releases): # The following models reference release: # ReleaseCommit.release # ReleaseEnvironment.release_id # ReleaseProject.release # GroupRelease.release_id # GroupResolution.release # Group.first_release # ReleaseFile.release from sentry.models import ( ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject, ReleaseProjectEnvironment, Group, GroupRelease, GroupResolution, ) model_list = ( ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject, ReleaseProjectEnvironment, GroupRelease, GroupResolution, ) for release in from_releases: for model in model_list: if hasattr(model, "release"): update_kwargs = {"release": to_release} else: update_kwargs = {"release_id": to_release.id} try: with transaction.atomic(): 
model.objects.filter(release_id=release.id).update(**update_kwargs) except IntegrityError: for item in model.objects.filter(release_id=release.id): try: with transaction.atomic(): model.objects.filter(id=item.id).update(**update_kwargs) except IntegrityError: item.delete() Group.objects.filter(first_release=release).update(first_release=to_release) release.delete() def add_dist(self, name, date_added=None): from sentry.models import Distribution if date_added is None: date_added = timezone.now() return Distribution.objects.get_or_create( release=self, name=name, defaults={"date_added": date_added, "organization_id": self.organization_id}, )[0] def get_dist(self, name): from sentry.models import Distribution try: return Distribution.objects.get(name=name, release=self) except Distribution.DoesNotExist: pass def add_project(self, project): """ Add a project to this release. Returns True if the project was added and did not already exist. """ from sentry.models import Project try: with transaction.atomic(): ReleaseProject.objects.create(project=project, release=self) if not project.flags.has_releases: project.flags.has_releases = True project.update(flags=F("flags").bitor(Project.flags.has_releases)) except IntegrityError: return False else: return True def handle_commit_ranges(self, refs): """ Takes commit refs of the form: [ { 'previousCommit': None, 'commit': 'previous_commit..commit', } ] Note: Overwrites 'previousCommit' and 'commit' """ for ref in refs: if COMMIT_RANGE_DELIMITER in ref["commit"]: ref["previousCommit"], ref["commit"] = ref["commit"].split(COMMIT_RANGE_DELIMITER) def set_refs(self, refs, user, fetch=False): with sentry_sdk.start_span(op="set_refs"): from sentry.api.exceptions import InvalidRepository from sentry.models import Commit, ReleaseHeadCommit, Repository from sentry.tasks.commits import fetch_commits # TODO: this does the wrong thing unless you are on the most # recent release. Add a timestamp compare? 
prev_release = ( type(self) .objects.filter( organization_id=self.organization_id, projects__in=self.projects.all() ) .extra(select={"sort": "COALESCE(date_released, date_added)"}) .exclude(version=self.version) .order_by("-sort") .first() ) names = {r["repository"] for r in refs} repos = list( Repository.objects.filter(organization_id=self.organization_id, name__in=names) ) repos_by_name = {r.name: r for r in repos} invalid_repos = names - set(repos_by_name.keys()) if invalid_repos: raise InvalidRepository("Invalid repository names: %s" % ",".join(invalid_repos)) self.handle_commit_ranges(refs) for ref in refs: repo = repos_by_name[ref["repository"]] commit = Commit.objects.get_or_create( organization_id=self.organization_id, repository_id=repo.id, key=ref["commit"] )[0] # update head commit for repo/release if exists ReleaseHeadCommit.objects.create_or_update( organization_id=self.organization_id, repository_id=repo.id, release=self, values={"commit": commit}, ) if fetch: fetch_commits.apply_async( kwargs={ "release_id": self.id, "user_id": user.id, "refs": refs, "prev_release_id": prev_release and prev_release.id, } ) def set_commits(self, commit_list): """ Bind a list of commits to this release. This will clear any existing commit log and replace it with the given commits. 
""" # Sort commit list in reverse order commit_list.sort(key=lambda commit: commit.get("timestamp", 0), reverse=True) # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy from sentry.models import ( Commit, CommitAuthor, Group, GroupLink, GroupResolution, GroupStatus, ReleaseCommit, ReleaseHeadCommit, Repository, PullRequest, ) from sentry.plugins.providers.repository import RepositoryProvider from sentry.tasks.integrations import kick_off_status_syncs # todo(meredith): implement for IntegrationRepositoryProvider commit_list = [ c for c in commit_list if not RepositoryProvider.should_ignore_commit(c.get("message", "")) ] lock_key = type(self).get_lock_key(self.organization_id, self.id) lock = locks.get(lock_key, duration=10) if lock.locked(): # Signal failure to the consumer rapidly. This aims to prevent the number # of timeouts and prevent web worker exhaustion when customers create # the same release rapidly for different projects. raise ReleaseCommitError with TimedRetryPolicy(10)(lock.acquire): start = time() with transaction.atomic(): # TODO(dcramer): would be good to optimize the logic to avoid these # deletes but not overly important ReleaseCommit.objects.filter(release=self).delete() authors = {} repos = {} commit_author_by_commit = {} head_commit_by_repo = {} latest_commit = None for idx, data in enumerate(commit_list): repo_name = data.get("repository") or "organization-{}".format( self.organization_id ) if repo_name not in repos: repos[repo_name] = repo = Repository.objects.get_or_create( organization_id=self.organization_id, name=repo_name )[0] else: repo = repos[repo_name] author_email = data.get("author_email") if author_email is None and data.get("author_name"): author_email = ( re.sub(r"[^a-zA-Z0-9\-_\.]*", "", data["author_name"]).lower() + "@localhost" ) author_email = truncatechars(author_email, 75) if not author_email: author = None elif author_email not in authors: author_data = {"name": 
data.get("author_name")} author, created = CommitAuthor.objects.get_or_create( organization_id=self.organization_id, email=author_email, defaults=author_data, ) if author.name != author_data["name"]: author.update(name=author_data["name"]) authors[author_email] = author else: author = authors[author_email] commit_data = {} # Update/set message and author if they are provided. if author is not None: commit_data["author"] = author if "message" in data: commit_data["message"] = data["message"] if "timestamp" in data: commit_data["date_added"] = data["timestamp"] commit, created = Commit.objects.get_or_create( organization_id=self.organization_id, repository_id=repo.id, key=data["id"], defaults=commit_data, ) if not created: commit_data = { key: value for key, value in commit_data.items() if getattr(commit, key) != value } if commit_data: commit.update(**commit_data) if author is None: author = commit.author commit_author_by_commit[commit.id] = author # Guard against patch_set being None patch_set = data.get("patch_set") or [] for patched_file in patch_set: try: with transaction.atomic(): CommitFileChange.objects.create( organization_id=self.organization.id, commit=commit, filename=patched_file["path"], type=patched_file["type"], ) except IntegrityError: pass try: with transaction.atomic(): ReleaseCommit.objects.create( organization_id=self.organization_id, release=self, commit=commit, order=idx, ) except IntegrityError: pass if latest_commit is None: latest_commit = commit head_commit_by_repo.setdefault(repo.id, commit.id) self.update( commit_count=len(commit_list), authors=[ str(a_id) for a_id in ReleaseCommit.objects.filter( release=self, commit__author_id__isnull=False ) .values_list("commit__author_id", flat=True) .distinct() ], last_commit_id=latest_commit.id if latest_commit else None, ) metrics.timing("release.set_commits.duration", time() - start) # fill any missing ReleaseHeadCommit entries for repo_id, commit_id in head_commit_by_repo.items(): try: with 
transaction.atomic(): ReleaseHeadCommit.objects.create( organization_id=self.organization_id, release_id=self.id, repository_id=repo_id, commit_id=commit_id, ) except IntegrityError: pass release_commits = list( ReleaseCommit.objects.filter(release=self) .select_related("commit") .values("commit_id", "commit__key") ) commit_resolutions = list( GroupLink.objects.filter( linked_type=GroupLink.LinkedType.commit, linked_id__in=[rc["commit_id"] for rc in release_commits], ).values_list("group_id", "linked_id") ) commit_group_authors = [ (cr[0], commit_author_by_commit.get(cr[1])) for cr in commit_resolutions # group_id ] pr_ids_by_merge_commit = list( PullRequest.objects.filter( merge_commit_sha__in=[rc["commit__key"] for rc in release_commits], organization_id=self.organization_id, ).values_list("id", flat=True) ) pull_request_resolutions = list( GroupLink.objects.filter( relationship=GroupLink.Relationship.resolves, linked_type=GroupLink.LinkedType.pull_request, linked_id__in=pr_ids_by_merge_commit, ).values_list("group_id", "linked_id") ) pr_authors = list( PullRequest.objects.filter( id__in=[prr[1] for prr in pull_request_resolutions] ).select_related("author") ) pr_authors_dict = {pra.id: pra.author for pra in pr_authors} pull_request_group_authors = [ (prr[0], pr_authors_dict.get(prr[1])) for prr in pull_request_resolutions ] user_by_author = {None: None} commits_and_prs = list(itertools.chain(commit_group_authors, pull_request_group_authors)) group_project_lookup = dict( Group.objects.filter(id__in=[group_id for group_id, _ in commits_and_prs]).values_list( "id", "project_id" ) ) for group_id, author in commits_and_prs: if author not in user_by_author: try: user_by_author[author] = author.find_users()[0] except IndexError: user_by_author[author] = None actor = user_by_author[author] with transaction.atomic(): GroupResolution.objects.create_or_update( group_id=group_id, values={ "release": self, "type": GroupResolution.Type.in_release, "status": 
GroupResolution.Status.resolved, "actor_id": actor.id if actor else None, }, ) group = Group.objects.get(id=group_id) group.update(status=GroupStatus.RESOLVED) remove_group_from_inbox(group, action=GroupInboxRemoveAction.RESOLVED, user=actor) metrics.incr("group.resolved", instance="in_commit", skip_internal=True) issue_resolved.send_robust( organization_id=self.organization_id, user=actor, group=group, project=group.project, resolution_type="with_commit", sender=type(self), ) kick_off_status_syncs.apply_async( kwargs={"project_id": group_project_lookup[group_id], "group_id": group_id} ) def safe_delete(self): """Deletes a release if possible or raises a `UnsafeReleaseDeletion` exception. """ from sentry.models import Group, ReleaseFile from sentry.snuba.sessions import check_has_health_data # we don't want to remove the first_release metadata on the Group, and # while people might want to kill a release (maybe to remove files), # removing the release is prevented if Group.objects.filter(first_release=self).exists(): raise UnsafeReleaseDeletion(ERR_RELEASE_REFERENCED) # We do not allow releases with health data to be deleted because # the upserting from snuba data would create the release again. # We would need to be able to delete this data from snuba which we # can't do yet. project_ids = list(self.projects.values_list("id").all()) if check_has_health_data([(p[0], self.version) for p in project_ids]): raise UnsafeReleaseDeletion(ERR_RELEASE_HEALTH_DATA) # TODO(dcramer): this needs to happen in the queue as it could be a long # and expensive operation file_list = ReleaseFile.objects.filter(release=self).select_related("file") for releasefile in file_list: releasefile.file.delete() releasefile.delete() self.delete()
class Project(Model):
    """
    Projects are permission based namespaces which generally
    are the top level entry point for all data.
    """
    __core__ = True

    slug = models.SlugField(null=True)
    name = models.CharField(max_length=200)
    # optional override for the auto-derived display color (hex, no '#')
    forced_color = models.CharField(max_length=6, null=True, blank=True)
    organization = FlexibleForeignKey('sentry.Organization')
    # DEPRECATED. use teams instead.
    team = FlexibleForeignKey('sentry.Team', null=True)
    teams = models.ManyToManyField(
        'sentry.Team', related_name='teams', through=ProjectTeam
    )
    public = models.BooleanField(default=False)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ObjectStatus.VISIBLE, _('Active')),
            (ObjectStatus.PENDING_DELETION, _('Pending Deletion')),
            (ObjectStatus.DELETION_IN_PROGRESS, _('Deletion in Progress')),
        ),
        db_index=True
    )
    # projects that were created before this field was present
    # will have their first_event field set to date_added
    first_event = models.DateTimeField(null=True)
    flags = BitField(
        flags=(('has_releases', 'This Project has sent release data'), ),
        default=0,
        null=True
    )

    objects = ProjectManager(cache_fields=[
        'pk',
        'slug',
    ])
    platform = models.CharField(max_length=64, null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_project'
        unique_together = (('team', 'slug'), ('organization', 'slug'))

    __repr__ = sane_repr('team_id', 'name', 'slug')

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def next_short_id(self):
        # Atomically increment and return this project's issue short-id counter.
        from sentry.models import Counter
        return Counter.increment(self)

    def save(self, *args, **kwargs):
        # Generate a unique slug under a lock before the first save.
        if not self.slug:
            lock = locks.get('slug:project', duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, organization=self.organization)
            super(Project, self).save(*args, **kwargs)
        else:
            super(Project, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri('/{}/{}/'.format(self.organization.slug, self.slug))

    def is_internal_project(self):
        # True when this project is one of the instance's own projects
        # (frontend error reporting or the internal Sentry project).
        for value in (settings.SENTRY_FRONTEND_PROJECT, settings.SENTRY_PROJECT):
            if six.text_type(self.id) == six.text_type(value) or six.text_type(
                self.slug
            ) == six.text_type(value):
                return True
        return False

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import ProjectOption
        return ProjectOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import ProjectOption
        return ProjectOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import ProjectOption
        return ProjectOption.objects.unset_value(self, *args, **kwargs)

    @property
    def callsign(self):
        return self.slug.upper()

    @property
    def color(self):
        # Explicit override wins; otherwise derive a stable color from the callsign.
        if self.forced_color is not None:
            return '#%s' % self.forced_color
        return get_hashed_color(self.callsign or self.slug)

    @property
    def member_set(self):
        # Active org members who belong to this project's (deprecated) team.
        from sentry.models import OrganizationMember
        return self.organization.member_set.filter(
            id__in=OrganizationMember.objects.filter(
                organizationmemberteam__is_active=True,
                organizationmemberteam__team=self.team,
            ).values('id'),
            user__is_active=True,
        ).distinct()

    def has_access(self, user, access=None):
        from sentry.models import AuthIdentity, OrganizationMember

        warnings.warn('Project.has_access is deprecated.', DeprecationWarning)

        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        try:
            member = queryset.get()
        except OrganizationMember.DoesNotExist:
            return False

        try:
            auth_identity = AuthIdentity.objects.get(
                auth_provider__organization=self.organization_id,
                user=member.user_id,
            )
        except AuthIdentity.DoesNotExist:
            # No SSO configured for this user — membership alone grants access.
            return True

        return auth_identity.is_valid(member)

    def get_audit_log_data(self):
        # Snapshot of the fields recorded in audit-log entries for this project.
        return {
            'id': self.id,
            'slug': self.slug,
            'name': self.name,
            'status': self.status,
            'public': self.public,
        }

    def get_full_name(self):
        # "<team> <name>" unless the team name is already part of the project name.
        if self.team.name not in self.name:
            return '%s %s' % (self.team.name, self.name)
        return self.name

    def get_notification_recipients(self, user_option):
        """Return user ids subscribed to ``user_option`` notifications for this project."""
        from sentry.models import UserOption
        alert_settings = dict(
            (o.user_id, int(o.value))
            for o in UserOption.objects.filter(
                project=self,
                key=user_option,
            )
        )
        # value == 0 means the user explicitly disabled this notification here
        disabled = set(u for u, v in six.iteritems(alert_settings) if v == 0)
        member_set = set(
            self.member_set.exclude(
                user__in=disabled,
            ).values_list('user', flat=True)
        )

        # determine members default settings
        members_to_check = set(u for u in member_set if u not in alert_settings)
        if members_to_check:
            disabled = set(
                (
                    uo.user_id
                    for uo in UserOption.objects.filter(
                        key='subscribe_by_default',
                        user__in=members_to_check,
                    ) if uo.value == '0'
                )
            )
            member_set = [x for x in member_set if x not in disabled]

        return member_set

    def get_mail_alert_subscribers(self):
        user_ids = self.get_notification_recipients('mail:alert')
        if not user_ids:
            return []
        from sentry.models import User
        return list(User.objects.filter(id__in=user_ids))

    def is_user_subscribed_to_mail_alerts(self, user):
        from sentry.models import UserOption
        is_enabled = UserOption.objects.get_value(user, 'mail:alert', project=self)
        if is_enabled is None:
            # No per-project setting — fall back to the global default ('1' = on).
            is_enabled = UserOption.objects.get_value(user, 'subscribe_by_default', '1') == '1'
        else:
            is_enabled = bool(is_enabled)
        return is_enabled

    def transfer_to(self, team):
        """Move this project to ``team`` (possibly in a different organization)."""
        from sentry.models import ProjectTeam, ReleaseProject

        organization = team.organization
        from_team_id = self.team_id

        # We only need to delete ReleaseProjects when moving to a different
        # Organization. Releases are bound to Organization, so it's not realistic
        # to keep this link unless we say, copied all Releases as well.
        if self.organization_id != organization.id:
            ReleaseProject.objects.filter(
                project_id=self.id,
            ).delete()

        self.organization = organization
        self.team = team

        try:
            with transaction.atomic():
                self.update(
                    organization=organization,
                    team=team,
                )
        except IntegrityError:
            # Slug collides in the destination org — re-slugify and retry.
            slugify_instance(self, self.name, organization=organization)
            self.update(
                slug=self.slug,
                organization=organization,
                team=team,
            )
        ProjectTeam.objects.filter(project=self, team_id=from_team_id).update(team=team)

    def add_team(self, team):
        # Returns False when the project<->team link already exists.
        try:
            with transaction.atomic():
                ProjectTeam.objects.create(project=self, team=team)
        except IntegrityError:
            return False
        else:
            return True

    def remove_team(self, team):
        ProjectTeam.objects.filter(
            project=self,
            team=team,
        ).delete()

    def get_security_token(self):
        # Lazily create the per-project token under a lock so concurrent
        # callers agree on a single value.
        lock = locks.get(self.get_lock_key(), duration=5)
        with TimedRetryPolicy(10)(lock.acquire):
            security_token = self.get_option('sentry:token', None)
            if security_token is None:
                security_token = uuid1().hex
                self.update_option('sentry:token', security_token)
            return security_token

    def get_lock_key(self):
        return 'project_token:%s' % self.id
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.IGNORED, _('Ignored')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey('sentry.Release', null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (
            ("can_view", "Can view"),
        )
        index_together = (
            ('project', 'first_release'),
        )
        unique_together = (
            ('project', 'short_id'),
        )

    __repr__ = sane_repr('project_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill timestamp defaults before the first save.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(reverse('sentry-group', args=[
            self.organization.slug, self.project.slug, self.id]))

    @property
    def qualified_short_id(self):
        # "SLUG-BASE32ID" human-readable identifier; None when short_id unset.
        if self.short_id is not None:
            return '%s-%s' % (
                self.project.slug.upper(),
                base32_encode(self.short_id),
            )

    @property
    def event_set(self):
        from sentry.models import Event
        return Event.objects.filter(group_id=self.id)

    def is_over_resolve_age(self):
        # True when the project's auto-resolve window (hours) has elapsed
        # since the group was last seen.
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Effective status, accounting for expired snoozes and auto-resolve age."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        if self.status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                # XXX(dcramer): if the snooze row exists then we need
                # to confirm its still valid
                if snooze.until > timezone.now():
                    return GroupStatus.IGNORED
                else:
                    return GroupStatus.UNRESOLVED

        if self.status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return self.status

    def get_share_id(self):
        # Opaque share token: base16 of "<project_id>.<group_id>".
        return b16encode(
            ('{}.{}'.format(self.project_id, self.id)).encode('utf-8')
        ).lower().decode('utf-8')

    @classmethod
    def from_share_id(cls, share_id):
        """Inverse of get_share_id; raises DoesNotExist on any malformed token."""
        if not share_id:
            raise cls.DoesNotExist

        try:
            project_id, group_id = b16decode(share_id.upper()).decode('utf-8').split('.')
        except (ValueError, TypeError):
            raise cls.DoesNotExist
        if not (project_id.isdigit() and group_id.isdigit()):
            raise cls.DoesNotExist

        return cls.objects.get(project=project_id, id=group_id)

    def get_score(self):
        # NOTE(review): math.log raises a ValueError when times_seen == 0; the
        # field defaults to 1 so this presumably never happens — confirm, the
        # newer revision of this model guards with `times_seen or 1`.
        return int(math.log(self.times_seen) * 600 + float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        # Cached on the instance; ties among the 5 newest rows broken by
        # EVENT_ORDERING_KEY.
        from sentry.models import Event

        if not hasattr(self, '_latest_event'):
            latest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('-datetime')[0:5],
                key=EVENT_ORDERING_KEY,
                reverse=True,
            )
            try:
                self._latest_event = latest_events[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_oldest_event(self):
        # Mirror of get_latest_event for the oldest rows.
        from sentry.models import Event

        if not hasattr(self, '_oldest_event'):
            oldest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('datetime')[0:5],
                key=EVENT_ORDERING_KEY,
            )
            try:
                self._oldest_event = oldest_events[0]
            except IndexError:
                self._oldest_event = None
        return self._oldest_event

    def get_unique_tags(self, tag, since=None, order_by='-times_seen'):
        # TODO(dcramer): this has zero test coverage and is a critical path
        from sentry.models import GroupTagValue

        queryset = GroupTagValue.objects.filter(
            group=self,
            key=tag,
        )
        if since:
            queryset = queryset.filter(last_seen__gte=since)
        return queryset.values_list(
            'value',
            'times_seen',
            'first_seen',
            'last_seen',
        ).order_by(order_by)

    def get_tags(self, with_internal=True):
        """Return [{'key', 'label'}] for this group's tags, sorted by label; cached per instance."""
        from sentry.models import GroupTagKey, TagKey

        if not hasattr(self, '_tag_cache'):
            group_tags = GroupTagKey.objects.filter(
                group=self,
                project=self.project,
            )
            if not with_internal:
                group_tags = group_tags.exclude(key__startswith='sentry:')
            group_tags = list(group_tags.values_list('key', flat=True))

            tag_keys = dict(
                (t.key, t)
                for t in TagKey.objects.filter(
                    project=self.project,
                    key__in=group_tags
                )
            )

            results = []
            for key in group_tags:
                try:
                    tag_key = tag_keys[key]
                except KeyError:
                    # No TagKey row — derive a display label from the raw key.
                    label = key.replace('_', ' ').title()
                else:
                    label = tag_key.get_label()

                results.append({
                    'key': key,
                    'label': label,
                })

            self._tag_cache = sorted(results, key=lambda x: x['label'])

        return self._tag_cache

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get('type', 'default')

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        etype = self.data.get('type')
        if etype is None:
            etype = 'default'
        if 'metadata' not in self.data:
            # Legacy rows without materialized metadata — compute it on the fly.
            data = self.data.copy() if self.data else {}
            data['message'] = self.message
            return eventtypes.get(etype)(data).get_metadata()
        return self.data['metadata']

    @property
    def title(self):
        et = eventtypes.get(self.get_event_type())(self.data)
        return et.to_string(self.get_event_metadata())

    def error(self):
        warnings.warn('Group.error is deprecated, use Group.title', DeprecationWarning)
        return self.title
    error.short_description = _('error')

    @property
    def message_short(self):
        warnings.warn('Group.message_short is deprecated, use Group.title', DeprecationWarning)
        return self.title

    def has_two_part_message(self):
        warnings.warn('Group.has_two_part_message is no longer used', DeprecationWarning)
        return False

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '[%s] %s: %s' % (
            self.project.get_full_name().encode('utf-8'),
            six.text_type(self.get_level_display()).upper().encode('utf-8'),
            self.title.encode('utf-8')
        )
class SentryApp(ParanoidModel, HasApiScopes):
    """A third-party (or internal) integration application installable on organizations."""

    __core__ = True

    application = models.OneToOneField("sentry.ApiApplication", null=True,
                                       on_delete=models.SET_NULL, related_name="sentry_app")

    # Much of the OAuth system in place currently depends on a User existing.
    # This "proxy user" represents the SentryApp in those cases.
    proxy_user = models.OneToOneField("sentry.User", null=True,
                                      on_delete=models.SET_NULL, related_name="sentry_app")

    # The Organization the Sentry App was created in "owns" it. Members of that
    # Org have differing access, dependent on their role within the Org.
    owner = FlexibleForeignKey("sentry.Organization", related_name="owned_sentry_apps")

    name = models.TextField()
    slug = models.CharField(max_length=SENTRY_APP_SLUG_MAX_LENGTH, unique=True)
    author = models.TextField(null=True)
    status = BoundedPositiveIntegerField(default=SentryAppStatus.UNPUBLISHED,
                                         choices=SentryAppStatus.as_choices(), db_index=True)
    uuid = models.CharField(max_length=64, default=default_uuid)

    redirect_url = models.URLField(null=True)
    webhook_url = models.URLField(max_length=512, null=True)

    # does the application subscribe to `event.alert`,
    # meaning can it be used in alert rules as a {service} ?
    is_alertable = models.BooleanField(default=False)

    # does the application need to wait for verification
    # on behalf of the external service to know if its installations
    # are successfully installed ?
    verify_install = models.BooleanField(default=True)

    events = ArrayField(of=models.TextField, null=True)

    overview = models.TextField(null=True)
    schema = EncryptedJsonField(default=dict)

    date_added = models.DateTimeField(default=timezone.now)
    # updated automatically on every save() below
    date_updated = models.DateTimeField(default=timezone.now)
    date_published = models.DateTimeField(null=True, blank=True)

    creator_user = FlexibleForeignKey("sentry.User", null=True,
                                      on_delete=models.SET_NULL, db_constraint=False)
    creator_label = models.TextField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_sentryapp"

    @classmethod
    def visible_for_user(cls, request):
        # Superusers see every app; everyone else only published ones.
        from sentry.auth.superuser import is_active_superuser

        if is_active_superuser(request):
            return cls.objects.all()

        return cls.objects.filter(status=SentryAppStatus.PUBLISHED)

    # this method checks if a user from a sentry app has permission to a specific project
    # for now, only checks if app is installed on the org of the project
    @classmethod
    def check_project_permission_for_sentry_app_user(cls, user, project):
        assert user.is_sentry_app
        # if the user exists, so should the sentry_app
        sentry_app = cls.objects.get(proxy_user=user)
        return sentry_app.is_installed_on(project.organization)

    @property
    def is_published(self):
        return self.status == SentryAppStatus.PUBLISHED

    @property
    def is_unpublished(self):
        return self.status == SentryAppStatus.UNPUBLISHED

    @property
    def is_internal(self):
        return self.status == SentryAppStatus.INTERNAL

    @property
    def is_publish_request_inprogress(self):
        return self.status == SentryAppStatus.PUBLISH_REQUEST_INPROGRESS

    @property
    def slug_for_metrics(self):
        # Bucket internal/unpublished apps so metrics don't leak per-app slugs.
        if self.is_internal:
            return "internal"
        if self.is_unpublished:
            return "unpublished"
        return self.slug

    def save(self, *args, **kwargs):
        # Touch date_updated on every write.
        self.date_updated = timezone.now()
        return super().save(*args, **kwargs)

    def is_installed_on(self, organization):
        return SentryAppInstallation.objects.filter(
            organization=organization,
            sentry_app=self,
        ).exists()

    def build_signature(self, body):
        # HMAC-SHA256 of the webhook body, keyed with the app's client secret.
        secret = self.application.client_secret
        return hmac.new(
            key=secret.encode("utf-8"), msg=body.encode("utf-8"), digestmod=sha256
        ).hexdigest()

    def show_auth_info(self, access):
        # True when this app's scopes are a subset of the requester's scopes.
        encoded_scopes = set({"%s" % scope for scope in list(access.scopes)})
        return set(self.scope_list).issubset(encoded_scopes)
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    culprit = models.CharField(max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.IGNORED, _('Ignored')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey('sentry.Release', null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )
        index_together = (('project', 'first_release'), )
        unique_together = (('project', 'short_id'), )

    __repr__ = sane_repr('project_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill timestamp defaults before the first save.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        if self.times_seen is None:
            self.times_seen = 1
        # Keep the materialized sort score in sync on every save.
        self.score = type(self).calculate_score(
            times_seen=self.times_seen,
            last_seen=self.last_seen,
        )
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self, params=None):
        # params, when given, is a mapping rendered as a query string.
        url = reverse('sentry-organization-issue', args=[self.organization.slug, self.id])
        if params:
            url = url + '?' + urlencode(params)
        return absolute_uri(url)

    @property
    def qualified_short_id(self):
        # "SLUG-BASE32ID" human-readable identifier; None when short_id unset.
        if self.short_id is not None:
            return '%s-%s' % (
                self.project.slug.upper(),
                base32_encode(self.short_id),
            )

    def is_over_resolve_age(self):
        # True when the project's auto-resolve window (hours) has elapsed
        # since the group was last seen.
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Effective status, accounting for invalid snoozes and auto-resolve age."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status

        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        # Share id is the GroupShare uuid; None when never shared.
        from sentry.models import GroupShare
        try:
            return GroupShare.objects.filter(group_id=self.id, ).values_list(
                'uuid', flat=True)[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    @classmethod
    def from_share_id(cls, share_id):
        """Resolve a 32-char share uuid back to its Group; raises DoesNotExist otherwise."""
        if not share_id or len(share_id) != 32:
            raise cls.DoesNotExist

        from sentry.models import GroupShare
        return cls.objects.get(id=GroupShare.objects.filter(
            uuid=share_id,
        ).values_list('group_id'), )

    def get_score(self):
        return type(self).calculate_score(self.times_seen, self.last_seen)

    def get_latest_event(self):
        # Cached on the instance.
        if not hasattr(self, '_latest_event'):
            self._latest_event = self.get_latest_event_for_environments()
        return self._latest_event

    def get_latest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.LATEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id)

    def get_oldest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.OLDEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id)

    def get_first_release(self):
        # Fall back to tagstore when the FK was never materialized.
        if self.first_release_id is None:
            return tagstore.get_first_release(self.project_id, self.id)

        return self.first_release.version

    def get_last_release(self):
        return tagstore.get_last_release(self.project_id, self.id)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get('type', 'default')

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data['metadata']

    @property
    def title(self):
        et = eventtypes.get(self.get_event_type())()
        return et.get_title(self.get_event_metadata())

    def location(self):
        et = eventtypes.get(self.get_event_type())()
        return et.get_location(self.get_event_metadata())

    def error(self):
        warnings.warn('Group.error is deprecated, use Group.title', DeprecationWarning)
        return self.title
    error.short_description = _('error')

    @property
    def message_short(self):
        warnings.warn('Group.message_short is deprecated, use Group.title', DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '%s - %s' % (self.qualified_short_id.encode('utf-8'), self.title.encode('utf-8'))

    def count_users_seen(self):
        return tagstore.get_groups_user_counts([self.project_id], [self.id],
                                               environment_ids=None)[self.id]

    @classmethod
    def calculate_score(cls, times_seen, last_seen):
        # log-weighted frequency plus the last-seen epoch; `or 1` guards the
        # math domain error when times_seen is 0.
        return math.log(float(times_seen or 1)) * 600 + float(
            last_seen.strftime('%s'))
class Release(Model):
    """
    A release is generally created when a new version is pushed into a
    production state.
    """
    __core__ = False

    organization = FlexibleForeignKey('sentry.Organization')
    projects = models.ManyToManyField(
        'sentry.Project', related_name='releases', through=ReleaseProject
    )
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    version = models.CharField(max_length=DB_VERSION_LENGTH)
    # ref might be the branch name being released
    ref = models.CharField(max_length=DB_VERSION_LENGTH, null=True, blank=True)
    url = models.URLField(null=True, blank=True)
    date_added = models.DateTimeField(default=timezone.now)
    # DEPRECATED - not available in UI or editable from API
    date_started = models.DateTimeField(null=True, blank=True)
    date_released = models.DateTimeField(null=True, blank=True)
    # arbitrary data recorded with the release
    data = JSONField(default={})
    new_groups = BoundedPositiveIntegerField(default=0)
    # generally the release manager, or the person initiating the process.
    # SET_NULL so that deleting a user does not cascade-delete their releases
    # (Django's default on_delete is CASCADE).  NOTE: requires a migration.
    owner = FlexibleForeignKey('sentry.User', null=True, blank=True,
                               on_delete=models.SET_NULL)

    # materialized stats
    commit_count = BoundedPositiveIntegerField(null=True)
    last_commit_id = BoundedPositiveIntegerField(null=True)
    authors = ArrayField(null=True)
    total_deploys = BoundedPositiveIntegerField(null=True)
    last_deploy_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_release'
        unique_together = (('organization', 'version'), )

    __repr__ = sane_repr('organization_id', 'version')

    @staticmethod
    def is_valid_version(value):
        """
        Return True if ``value`` is acceptable as a release version.

        Rejects empty values, path-like values ('.', '..'), versions
        containing any BAD_RELEASE_CHARS, and the reserved keyword
        'latest' (used as a sentinel when querying releases).
        """
        return not (
            any(c in value for c in BAD_RELEASE_CHARS) or value in ('.', '..') or not value or
            value.lower() == 'latest'
        )

    @classmethod
    def get_cache_key(cls, organization_id, version):
        # Version is hashed so arbitrary characters can't break the key.
        return 'release:3:%s:%s' % (organization_id, md5_text(version).hexdigest())

    @classmethod
    def get_lock_key(cls, organization_id, release_id):
        # Lock key guarding commit (re)binding in set_commits().
        return 'releasecommits:{}:{}'.format(organization_id, release_id)

    @classmethod
    def get(cls, project, version):
        """
        Fetch a release for ``project``/``version`` with a 5 minute cache.

        Returns None when no such release exists; negative lookups are
        cached as -1 to avoid repeated queries.
        """
        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release is None:
            try:
                release = cls.objects.get(
                    organization_id=project.organization_id,
                    projects=project,
                    version=version,
                )
            except cls.DoesNotExist:
                release = -1
            cache.set(cache_key, release, 300)

        if release == -1:
            return
        return release

    @classmethod
    def get_or_create(cls, project, version, date_added=None):
        """
        Fetch or create a release for ``project``/``version``.

        Also accepts legacy '<project-slug>-<version>' style versions, and
        ensures the project is linked to the release and flagged as having
        release data.  Results are cached for an hour.
        """
        from sentry.models import Project

        if date_added is None:
            date_added = timezone.now()

        cache_key = cls.get_cache_key(project.organization_id, version)

        release = cache.get(cache_key)
        if release in (None, -1):
            # TODO(dcramer): if the cache result is -1 we could attempt a
            # default create here instead of default get
            project_version = ('%s-%s' % (project.slug, version))[:DB_VERSION_LENGTH]
            releases = list(
                cls.objects.filter(
                    organization_id=project.organization_id,
                    version__in=[version, project_version],
                    projects=project
                )
            )
            if releases:
                # prefer the legacy project-prefixed version when both exist
                try:
                    release = [r for r in releases if r.version == project_version][0]
                except IndexError:
                    release = releases[0]
            else:
                try:
                    with transaction.atomic():
                        release = cls.objects.create(
                            organization_id=project.organization_id,
                            version=version,
                            date_added=date_added,
                            total_deploys=0,
                        )
                except IntegrityError:
                    # lost a create race; another worker made it first
                    release = cls.objects.get(
                        organization_id=project.organization_id, version=version
                    )
                release.add_project(project)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(flags=F('flags').bitor(Project.flags.has_releases))

            # TODO(dcramer): upon creating a new release, check if it should be
            # the new "latest release" for this project
            cache.set(cache_key, release, 3600)

        return release

    @classmethod
    def merge(cls, to_release, from_releases):
        """
        Fold each release in ``from_releases`` into ``to_release``,
        repointing every referencing row, then delete the source release.

        Rows whose repoint would violate a unique constraint (the target
        already has an equivalent row) are deleted instead.
        """
        # The following models reference release:
        # ReleaseCommit.release
        # ReleaseEnvironment.release_id
        # ReleaseProject.release
        # GroupRelease.release_id
        # GroupResolution.release
        # Group.first_release
        # ReleaseFile.release
        from sentry.models import (
            ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject,
            ReleaseProjectEnvironment, Group, GroupRelease, GroupResolution
        )

        model_list = (
            ReleaseCommit, ReleaseEnvironment, ReleaseFile, ReleaseProject,
            ReleaseProjectEnvironment, GroupRelease, GroupResolution
        )
        for release in from_releases:
            for model in model_list:
                # some models expose a `release` FK, others a raw release_id
                if hasattr(model, 'release'):
                    update_kwargs = {'release': to_release}
                else:
                    update_kwargs = {'release_id': to_release.id}
                try:
                    with transaction.atomic():
                        model.objects.filter(release_id=release.id).update(**update_kwargs)
                except IntegrityError:
                    # bulk update collided with a unique constraint; retry
                    # row-by-row and drop duplicates
                    for item in model.objects.filter(release_id=release.id):
                        try:
                            with transaction.atomic():
                                model.objects.filter(id=item.id).update(**update_kwargs)
                        except IntegrityError:
                            item.delete()

            Group.objects.filter(first_release=release).update(first_release=to_release)

            release.delete()

    @property
    def short_version(self):
        return Release.get_display_version(self.version)

    @staticmethod
    def get_display_version(version):
        """
        Return a human-friendly form of ``version``: strips a dotted-path
        prefix and abbreviates bare sha1s to 7 characters.
        """
        match = _dotted_path_prefix_re.match(version)
        if match is not None:
            version = version[match.end():]
        if _sha1_re.match(version):
            return version[:7]
        return version

    def add_dist(self, name, date_added=None):
        """Get or create the Distribution ``name`` bound to this release."""
        from sentry.models import Distribution
        if date_added is None:
            date_added = timezone.now()
        return Distribution.objects.get_or_create(
            release=self,
            name=name,
            defaults={
                'date_added': date_added,
                'organization_id': self.organization_id,
            }
        )[0]

    def get_dist(self, name):
        """Return the Distribution ``name`` for this release, or None."""
        from sentry.models import Distribution
        try:
            return Distribution.objects.get(name=name, release=self)
        except Distribution.DoesNotExist:
            pass

    def add_project(self, project):
        """
        Add a project to this release.

        Returns True if the project was added and did not already exist.
        """
        from sentry.models import Project
        try:
            with transaction.atomic():
                ReleaseProject.objects.create(project=project, release=self)
                if not project.flags.has_releases:
                    project.flags.has_releases = True
                    project.update(
                        flags=F('flags').bitor(Project.flags.has_releases),
                    )
        except IntegrityError:
            return False
        else:
            return True

    def set_refs(self, refs, user, fetch=False):
        """
        Record head-commit refs (``{'repository', 'commit'}`` dicts) for this
        release; optionally kick off async commit fetching.

        Raises InvalidRepository if any referenced repository is unknown.
        """
        from sentry.api.exceptions import InvalidRepository
        from sentry.models import Commit, ReleaseHeadCommit, Repository
        from sentry.tasks.commits import fetch_commits

        # TODO: this does the wrong thing unless you are on the most
        # recent release.  Add a timestamp compare?
        prev_release = type(self).objects.filter(
            organization_id=self.organization_id,
            projects__in=self.projects.all(),
        ).extra(select={
            'sort': 'COALESCE(date_released, date_added)',
        }).exclude(version=self.version).order_by('-sort').first()

        names = {r['repository'] for r in refs}
        repos = list(
            Repository.objects.filter(
                organization_id=self.organization_id,
                name__in=names,
            )
        )
        repos_by_name = {r.name: r for r in repos}
        invalid_repos = names - set(repos_by_name.keys())
        if invalid_repos:
            raise InvalidRepository('Invalid repository names: %s' % ','.join(invalid_repos))

        for ref in refs:
            repo = repos_by_name[ref['repository']]
            commit = Commit.objects.get_or_create(
                organization_id=self.organization_id,
                repository_id=repo.id,
                key=ref['commit'],
            )[0]
            # update head commit for repo/release if exists
            ReleaseHeadCommit.objects.create_or_update(
                organization_id=self.organization_id,
                repository_id=repo.id,
                release=self,
                values={
                    'commit': commit,
                }
            )
        if fetch:
            fetch_commits.apply_async(
                kwargs={
                    'release_id': self.id,
                    'user_id': user.id,
                    'refs': refs,
                    'prev_release_id': prev_release and prev_release.id,
                }
            )

    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        These should be ordered from newest to oldest.

        This will clear any existing commit log and replace it with the given
        commits.  As a side effect, groups referenced by "fixes" markers in
        the commits (or their merged pull requests) are resolved in this
        release.
        """
        # TODO(dcramer): this function could use some cleanup/refactoring as
        # its a bit unwieldly
        from sentry.models import (
            Commit, CommitAuthor, Group, GroupLink, GroupResolution, GroupStatus, ReleaseCommit,
            ReleaseHeadCommit, Repository, PullRequest
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        # NOTE(review): CommitFileChange is used below but is not in the
        # import list above — presumably imported at module level; verify.

        commit_list = [
            c for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get('message', ''))
        ]

        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid
                # these deletes but not overly important
                ReleaseCommit.objects.filter(
                    release=self,
                ).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get('repository'
                                         ) or 'organization-{}'.format(self.organization_id)
                    if repo_name not in repos:
                        repos[repo_name] = repo = Repository.objects.get_or_create(
                            organization_id=self.organization_id,
                            name=repo_name,
                        )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get('author_email')
                    if author_email is None and data.get('author_name'):
                        # synthesize a stable fake email from the author name
                        author_email = (
                            re.sub(r'[^a-zA-Z0-9\-_\.]*', '', data['author_name']).lower() +
                            '@localhost'
                        )

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        authors[author_email] = author = CommitAuthor.objects.get_or_create(
                            organization_id=self.organization_id,
                            email=author_email,
                            defaults={
                                'name': data.get('author_name'),
                            }
                        )[0]
                        if data.get('author_name') and author.name != data['author_name']:
                            author.update(name=data['author_name'])
                    else:
                        author = authors[author_email]

                    defaults = {
                        'message': data.get('message'),
                        'author': author,
                        'date_added': data.get('timestamp') or timezone.now(),
                    }
                    commit, created = Commit.objects.get_or_create(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data['id'],
                        defaults=defaults,
                    )

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get('patch_set', [])
                    for patched_file in patch_set:
                        CommitFileChange.objects.get_or_create(
                            # use the local organization_id column; going via
                            # self.organization would issue an extra query
                            organization_id=self.organization_id,
                            commit=commit,
                            filename=patched_file['path'],
                            type=patched_file['type'],
                        )

                    if not created:
                        # backfill message/author on pre-existing commits
                        update_kwargs = {}
                        if commit.message is None and defaults['message'] is not None:
                            update_kwargs['message'] = defaults['message']
                        if commit.author_id is None and defaults['author'] is not None:
                            update_kwargs['author'] = defaults['author']
                        if update_kwargs:
                            commit.update(**update_kwargs)

                    ReleaseCommit.objects.create(
                        organization_id=self.organization_id,
                        release=self,
                        commit=commit,
                        order=idx,
                    )
                    if latest_commit is None:
                        # commit_list is newest-first, so the first one wins
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self,
                            commit__author_id__isnull=False,
                        ).values_list('commit__author_id', flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(ReleaseCommit.objects.filter(release=self)
                               .select_related('commit').values('commit_id', 'commit__key'))

        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc['commit_id'] for rc in release_commits],
            ).values_list('group_id', 'linked_id')
        )
        commit_group_authors = [
            (cr[0],  # group_id
             commit_author_by_commit.get(cr[1])) for cr in commit_resolutions
        ]

        pr_ids_by_merge_commit = list(PullRequest.objects.filter(
            merge_commit_sha__in=[rc['commit__key'] for rc in release_commits],
        ).values_list('id', flat=True))

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list('group_id', 'linked_id')
        )

        pr_authors = list(PullRequest.objects.filter(
            id__in=[prr[1] for prr in pull_request_resolutions],
        ).select_related('author'))

        pr_authors_dict = {
            pra.id: pra.author for pra in pr_authors
        }

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        user_by_author = {None: None}

        for group_id, author in itertools.chain(commit_group_authors,
                                                pull_request_group_authors):
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        'release': self,
                        'type': GroupResolution.Type.in_release,
                        'status': GroupResolution.Status.resolved,
                        'actor_id': actor.id if actor else None,
                    },
                )
                Group.objects.filter(
                    id=group_id,
                ).update(status=GroupStatus.RESOLVED)
class ReleaseFile(Model):
    r"""
    A ReleaseFile is an association between a Release and a File.

    The ident of the file should be sha1(name) or
    sha1(name '\x00\x00' dist.name) and must be unique per release.
    """
    __core__ = False

    organization = FlexibleForeignKey("sentry.Organization")
    # DEPRECATED
    project_id = BoundedPositiveIntegerField(null=True)
    release = FlexibleForeignKey("sentry.Release")
    file = FlexibleForeignKey("sentry.File")
    ident = models.CharField(max_length=40)
    name = models.TextField()
    dist = FlexibleForeignKey("sentry.Distribution", null=True)

    __repr__ = sane_repr("release", "ident")

    class Meta:
        unique_together = (("release", "ident"),)
        index_together = (("release", "name"),)
        app_label = "sentry"
        db_table = "sentry_releasefile"

    def save(self, *args, **kwargs):
        # Derive the ident lazily: only when a name exists and no ident has
        # been assigned yet.
        if self.name and not self.ident:
            dist = self.dist_id and self.dist.name or None
            self.ident = type(self).get_ident(self.name, dist)
        return super().save(*args, **kwargs)

    def update(self, *args, **kwargs):
        # A name change invalidates the ident, so recompute it unless the
        # caller supplied one explicitly.
        if "name" in kwargs and "ident" not in kwargs:
            dist = kwargs.get("dist") or self.dist
            new_ident = type(self).get_ident(kwargs["name"], dist and dist.name or dist)
            self.ident = new_ident
            kwargs["ident"] = new_ident
        return super().update(*args, **kwargs)

    @classmethod
    def get_ident(cls, name, dist=None):
        # sha1 of the name alone, or of name + NUL NUL + dist name when a
        # distribution is attached.
        if dist is None:
            return sha1_text(name).hexdigest()
        return sha1_text(name + "\x00\x00" + dist).hexdigest()

    @classmethod
    def normalize(cls, url):
        """Transforms a full absolute url into 2 or 4 generalized options

        * the original url as input
        * (optional) original url without querystring
        * the full url, but stripped of scheme and netloc
        * (optional) full url without scheme and netloc or querystring
        """
        # The fragment is always discarded.
        scheme, netloc, path, query, _ = urlsplit(url)

        variants = [urlunsplit((scheme, netloc, path, query, ""))]
        if query:
            variants.append(urlunsplit((scheme, netloc, path, "", "")))
        variants.append("~" + urlunsplit(("", "", path, query, "")))
        if query:
            variants.append("~" + urlunsplit(("", "", path, "", "")))
        return variants
class ReleaseEnvironment(Model): __core__ = False organization_id = BoundedPositiveIntegerField(db_index=True) project_id = BoundedPositiveIntegerField(db_index=True) release_id = BoundedPositiveIntegerField(db_index=True) environment_id = BoundedPositiveIntegerField(db_index=True) first_seen = models.DateTimeField(default=timezone.now) last_seen = models.DateTimeField(default=timezone.now, db_index=True) class Meta: app_label = 'sentry' db_table = 'sentry_environmentrelease' unique_together = (('project_id', 'release_id', 'environment_id'),) __repr__ = sane_repr('project_id', 'release_id', 'environment_id') @classmethod def get_cache_key(cls, project_id, release_id, environment_id): return 'releaseenv:1:{}:{}:{}'.format( project_id, release_id, environment_id, ) @classmethod def get_or_create(cls, project, release, environment, datetime, **kwargs): cache_key = cls.get_cache_key(project.id, release.id, environment.id) instance = cache.get(cache_key) if instance is None: try: with transaction.atomic(): instance, created = cls.objects.create( release_id=release.id, project_id=project.id, organization_id=project.organization_id, environment_id=environment.id, first_seen=datetime, last_seen=datetime, ), True except IntegrityError: instance, created = cls.objects.get( release_id=release.id, project_id=project.id, organization_id=project.organization_id, environment_id=environment.id, ), False cache.set(cache_key, instance, 3600) else: created = False # TODO(dcramer): this would be good to buffer, but until then we minimize # updates to once a minute, and allow Postgres to optimistically skip # it even if we can't if not created and instance.last_seen < datetime - timedelta(seconds=60): cls.objects.filter( id=instance.id, last_seen__lt=datetime - timedelta(seconds=60), ).update( last_seen=datetime, ) instance.last_seen = datetime cache.set(cache_key, instance, 3600) return instance
class FileBlob(Model):
    # A content-addressed (sha1) blob stored via the configured file storage
    # backend; deduplicated by checksum.
    __core__ = False

    path = models.TextField(null=True)
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, unique=True)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_fileblob'

    @classmethod
    def from_file(cls, fileobj):
        """
        Retrieve the FileBlob matching the given file's contents.

        If not already present, this will cause it to be stored.

        >>> blob = FileBlob.from_file(fileobj)
        """
        size = 0
        checksum = sha1(b'')
        # one pass to hash and size the content; NOTE(review): this consumes
        # the iterator — presumably storage.save() rewinds the file object
        # before writing (Django File.chunks seeks to 0); verify.
        for chunk in fileobj:
            size += len(chunk)
            checksum.update(chunk)
        checksum = checksum.hexdigest()

        # TODO(dcramer): the database here is safe, but if this lock expires
        # and duplicate files are uploaded then we need to prune one
        lock = locks.get('fileblob:upload:{}'.format(checksum), duration=60 * 10)
        with TimedRetryPolicy(60)(lock.acquire):
            # test for presence
            try:
                existing = FileBlob.objects.get(checksum=checksum)
            except FileBlob.DoesNotExist:
                pass
            else:
                return existing

            blob = cls(
                size=size,
                checksum=checksum,
            )
            blob.path = cls.generate_unique_path(blob.timestamp)
            storage = get_storage()
            storage.save(blob.path, fileobj)
            blob.save()

        metrics.timing('filestore.blob-size', size)
        return blob

    @classmethod
    def generate_unique_path(cls, timestamp):
        # Partition storage paths by day-since-epoch plus a random uuid.
        # NOTE(review): strftime('%s') is a non-portable glibc extension.
        pieces = [six.text_type(x) for x in divmod(int(timestamp.strftime('%s')), ONE_DAY)]
        pieces.append(uuid4().hex)
        return u'/'.join(pieces)

    def delete(self, *args, **kwargs):
        # Hold the same lock as from_file() so a concurrent upload of the
        # same checksum can't race the storage deletion.
        lock = locks.get('fileblob:upload:{}'.format(self.checksum), duration=60 * 10)
        with TimedRetryPolicy(60)(lock.acquire):
            if self.path:
                self.deletefile(commit=False)
            super(FileBlob, self).delete(*args, **kwargs)

    def deletefile(self, commit=False):
        # Remove the backing object from storage; optionally persist the
        # cleared path immediately.
        assert self.path

        storage = get_storage()
        storage.delete(self.path)

        self.path = None

        if commit:
            self.save()

    def getfile(self):
        """
        Return a file-like object for this File's content.

        >>> with blob.getfile() as src, open('/tmp/localfile', 'wb') as dst:
        >>>     for chunk in src.chunks():
        >>>         dst.write(chunk)
        """
        assert self.path

        storage = get_storage()
        return storage.open(self.path)
class Project(Model):
    """
    Projects are permission based namespaces which generally
    are the top level entry point for all data.
    """
    __core__ = True

    slug = models.SlugField(null=True)
    name = models.CharField(max_length=200)
    forced_color = models.CharField(max_length=6, null=True, blank=True)
    organization = FlexibleForeignKey('sentry.Organization')
    # DEPRECATED. use teams instead.
    team = FlexibleForeignKey('sentry.Team', null=True, on_delete=models.SET_NULL)
    teams = models.ManyToManyField('sentry.Team', related_name='teams', through=ProjectTeam)
    public = models.BooleanField(default=False)
    date_added = models.DateTimeField(default=timezone.now)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (ObjectStatus.VISIBLE, _('Active')),
            (ObjectStatus.PENDING_DELETION, _('Pending Deletion')),
            (ObjectStatus.DELETION_IN_PROGRESS, _('Deletion in Progress')),
        ),
        db_index=True)
    # projects that were created before this field was present
    # will have their first_event field set to date_added
    first_event = models.DateTimeField(null=True)
    flags = BitField(
        flags=(('has_releases', 'This Project has sent release data'), ), default=0, null=True)

    objects = ProjectManager(cache_fields=[
        'pk',
        'slug',
    ])
    platform = models.CharField(max_length=64, null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_project'
        unique_together = (('organization', 'slug'), )

    __repr__ = sane_repr('team_id', 'name', 'slug')

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def next_short_id(self):
        # Monotonic per-project counter used for short ids (e.g. PROJ-123).
        from sentry.models import Counter
        return Counter.increment(self)

    def save(self, *args, **kwargs):
        # Auto-slugify under a lock so concurrent saves can't pick the same
        # slug within the organization.
        if not self.slug:
            lock = locks.get('slug:project', duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, organization=self.organization)
                super(Project, self).save(*args, **kwargs)
        else:
            super(Project, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri('/{}/{}/'.format(self.organization.slug, self.slug))

    def is_internal_project(self):
        # True when this project is one of the configured self-monitoring
        # projects (frontend or backend).
        for value in (settings.SENTRY_FRONTEND_PROJECT, settings.SENTRY_PROJECT):
            if six.text_type(self.id) == six.text_type(value) or six.text_type(
                    self.slug) == six.text_type(value):
                return True
        return False

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import ProjectOption
        return ProjectOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import ProjectOption
        return ProjectOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import ProjectOption
        return ProjectOption.objects.unset_value(self, *args, **kwargs)

    @property
    def callsign(self):
        return self.slug.upper()

    @property
    def color(self):
        # Explicit override wins; otherwise derive a stable color from the
        # callsign.
        if self.forced_color is not None:
            return '#%s' % self.forced_color
        return get_hashed_color(self.callsign or self.slug)

    @property
    def member_set(self):
        # Active org members who belong (via an active membership) to one of
        # this project's teams.
        from sentry.models import OrganizationMember
        return self.organization.member_set.filter(
            id__in=OrganizationMember.objects.filter(
                organizationmemberteam__is_active=True,
                organizationmemberteam__team__in=self.teams.all(),
            ).values('id'),
            user__is_active=True,
        ).distinct()

    def has_access(self, user, access=None):
        from sentry.models import AuthIdentity, OrganizationMember

        warnings.warn('Project.has_access is deprecated.', DeprecationWarning)

        queryset = self.member_set.filter(user=user)

        if access is not None:
            queryset = queryset.filter(type__lte=access)

        try:
            member = queryset.get()
        except OrganizationMember.DoesNotExist:
            return False

        try:
            auth_identity = AuthIdentity.objects.get(
                auth_provider__organization=self.organization_id,
                user=member.user_id,
            )
        except AuthIdentity.DoesNotExist:
            # no SSO configured for this member — membership alone suffices
            return True

        return auth_identity.is_valid(member)

    def get_audit_log_data(self):
        # Snapshot of fields recorded in AuditLogEntry.data.
        return {
            'id': self.id,
            'slug': self.slug,
            'name': self.name,
            'status': self.status,
            'public': self.public,
        }

    def get_full_name(self):
        # Prefix with the first team's name unless the project name already
        # contains it.
        team_name = self.teams.values_list('name', flat=True).first()
        if team_name is not None and team_name not in self.name:
            return '%s %s' % (team_name, self.name)
        return self.name

    def get_notification_recipients(self, user_option):
        """
        Return ids of members who should be notified, honoring per-project
        settings (``user_option``) with a 'subscribe_by_default' fallback.
        """
        from sentry.models import UserOption
        alert_settings = dict(
            (o.user_id, int(o.value)) for o in UserOption.objects.filter(
                project=self,
                key=user_option,
            )
        )
        disabled = set(u for u, v in six.iteritems(alert_settings) if v == 0)
        member_set = set(
            self.member_set.exclude(user__in=disabled, ).values_list('user', flat=True))

        # determine members default settings
        members_to_check = set(u for u in member_set if u not in alert_settings)
        if members_to_check:
            disabled = set(
                (
                    uo.user_id for uo in UserOption.objects.filter(
                        key='subscribe_by_default',
                        user__in=members_to_check,
                    ) if uo.value == '0'
                )
            )
            member_set = [x for x in member_set if x not in disabled]

        return member_set

    def get_mail_alert_subscribers(self):
        user_ids = self.get_notification_recipients('mail:alert')
        if not user_ids:
            return []
        from sentry.models import User
        return list(User.objects.filter(id__in=user_ids))

    def is_user_subscribed_to_mail_alerts(self, user):
        from sentry.models import UserOption
        is_enabled = UserOption.objects.get_value(user, 'mail:alert', project=self)
        if is_enabled is None:
            # no project-level preference: fall back to the global default
            is_enabled = UserOption.objects.get_value(user, 'subscribe_by_default', '1') == '1'
        else:
            is_enabled = bool(is_enabled)
        return is_enabled

    def transfer_to(self, team):
        # NOTE: this will only work properly if the new team is in a different
        # org than the existing one, which is currently the only use case in
        # production
        # TODO(jess): refactor this to make it an org transfer only
        from sentry.models import (
            Environment,
            EnvironmentProject,
            ProjectTeam,
            ReleaseProject,
            ReleaseProjectEnvironment,
            Rule,
        )

        organization = team.organization

        old_org_id = self.organization_id
        org_changed = old_org_id != organization.id

        self.organization = organization

        try:
            with transaction.atomic():
                self.update(organization=organization, )
        except IntegrityError:
            # slug collision in the target org: re-slugify and retry
            slugify_instance(self, self.name, organization=organization)
            self.update(
                slug=self.slug,
                organization=organization,
            )

        # Both environments and releases are bound at an organization level.
        # Due to this, when you transfer a project into another org, we have to
        # handle this behavior somehow. We really only have two options here:
        # * Copy over all releases/environments into the new org and handle de-duping
        # * Delete the bindings and let them reform with new data.
        # We're generally choosing to just delete the bindings since new data
        # flowing in will recreate links correctly. The tradeoff is that
        # historical data is lost, but this is a compromise we're willing to
        # take and a side effect of allowing this feature. There are exceptions
        # to this however, such as rules, which should maintain their
        # configuration when moved across organizations.
        if org_changed:
            for model in ReleaseProject, ReleaseProjectEnvironment, EnvironmentProject:
                model.objects.filter(project_id=self.id, ).delete()
            # this is getting really gross, but make sure there aren't lingering associations
            # with old orgs or teams
            ProjectTeam.objects.filter(project=self, team__organization_id=old_org_id).delete()

        rules_by_environment_id = defaultdict(set)
        for rule_id, environment_id in Rule.objects.filter(
                project_id=self.id, environment_id__isnull=False).values_list(
                    'id', 'environment_id'):
            rules_by_environment_id[environment_id].add(rule_id)

        environment_names = dict(
            Environment.objects.filter(id__in=rules_by_environment_id, ).values_list('id', 'name')
        )

        # re-point rules at same-named environments in the new org
        for environment_id, rule_ids in rules_by_environment_id.items():
            Rule.objects.filter(id__in=rule_ids).update(
                environment_id=Environment.get_or_create(
                    self,
                    environment_names[environment_id],
                ).id,
            )

        # ensure this actually exists in case from team was null
        self.add_team(team)

    def add_team(self, team):
        # Returns False when the (project, team) association already exists.
        try:
            with transaction.atomic():
                ProjectTeam.objects.create(project=self, team=team)
        except IntegrityError:
            return False
        else:
            return True

    def remove_team(self, team):
        ProjectTeam.objects.filter(
            project=self,
            team=team,
        ).delete()

    def get_security_token(self):
        # Lazily create the project's security token under a lock so two
        # workers can't mint different tokens concurrently.
        lock = locks.get(self.get_lock_key(), duration=5)
        with TimedRetryPolicy(10)(lock.acquire):
            security_token = self.get_option('sentry:token', None)
            if security_token is None:
                security_token = uuid1().hex
                self.update_option('sentry:token', security_token)
            return security_token

    def get_lock_key(self):
        return 'project_token:%s' % self.id
class AuditLogEntry(Model):
    """
    A single audit-log record: who (actor or api key) did what (event) to
    which object, in which organization.
    """
    __core__ = False

    organization = FlexibleForeignKey('sentry.Organization')
    actor_label = models.CharField(max_length=64, null=True, blank=True)
    # if the entry was created via a user
    actor = FlexibleForeignKey(
        'sentry.User', related_name='audit_actors', null=True, blank=True)
    # if the entry was created via an api key
    actor_key = FlexibleForeignKey('sentry.ApiKey', null=True, blank=True)
    target_object = BoundedPositiveIntegerField(null=True)
    target_user = FlexibleForeignKey(
        'sentry.User', null=True, blank=True, related_name='audit_targets'
    )
    # TODO(dcramer): we want to compile this mapping into JSX for the UI
    event = BoundedPositiveIntegerField(
        choices=(
            # We emulate github a bit with event naming
            (AuditLogEntryEvent.MEMBER_INVITE, 'member.invite'),
            (AuditLogEntryEvent.MEMBER_ADD, 'member.add'),
            (AuditLogEntryEvent.MEMBER_ACCEPT, 'member.accept-invite'),
            (AuditLogEntryEvent.MEMBER_REMOVE, 'member.remove'),
            (AuditLogEntryEvent.MEMBER_EDIT, 'member.edit'),
            (AuditLogEntryEvent.MEMBER_JOIN_TEAM, 'member.join-team'),
            (AuditLogEntryEvent.MEMBER_LEAVE_TEAM, 'member.leave-team'),
            (AuditLogEntryEvent.TEAM_ADD, 'team.create'),
            (AuditLogEntryEvent.TEAM_EDIT, 'team.edit'),
            (AuditLogEntryEvent.TEAM_REMOVE, 'team.remove'),
            (AuditLogEntryEvent.PROJECT_ADD, 'project.create'),
            (AuditLogEntryEvent.PROJECT_EDIT, 'project.edit'),
            (AuditLogEntryEvent.PROJECT_REMOVE, 'project.remove'),
            (AuditLogEntryEvent.PROJECT_SET_PUBLIC, 'project.set-public'),
            (AuditLogEntryEvent.PROJECT_SET_PRIVATE, 'project.set-private'),
            (AuditLogEntryEvent.PROJECT_REQUEST_TRANSFER, 'project.request-transfer'),
            (AuditLogEntryEvent.PROJECT_ACCEPT_TRANSFER, 'project.accept-transfer'),
            (AuditLogEntryEvent.ORG_ADD, 'org.create'),
            (AuditLogEntryEvent.ORG_EDIT, 'org.edit'),
            (AuditLogEntryEvent.ORG_REMOVE, 'org.remove'),
            (AuditLogEntryEvent.ORG_RESTORE, 'org.restore'),
            (AuditLogEntryEvent.TAGKEY_REMOVE, 'tagkey.remove'),
            (AuditLogEntryEvent.PROJECTKEY_ADD, 'projectkey.create'),
            (AuditLogEntryEvent.PROJECTKEY_EDIT, 'projectkey.edit'),
            (AuditLogEntryEvent.PROJECTKEY_REMOVE, 'projectkey.remove'),
            (AuditLogEntryEvent.PROJECTKEY_ENABLE, 'projectkey.enable'),
            (AuditLogEntryEvent.PROJECTKEY_DISABLE, 'projectkey.disable'),
            (AuditLogEntryEvent.SSO_ENABLE, 'sso.enable'),
            (AuditLogEntryEvent.SSO_DISABLE, 'sso.disable'),
            (AuditLogEntryEvent.SSO_EDIT, 'sso.edit'),
            (AuditLogEntryEvent.SSO_IDENTITY_LINK, 'sso-identity.link'),
            (AuditLogEntryEvent.APIKEY_ADD, 'api-key.create'),
            (AuditLogEntryEvent.APIKEY_EDIT, 'api-key.edit'),
            (AuditLogEntryEvent.APIKEY_REMOVE, 'api-key.remove'),
            (AuditLogEntryEvent.RULE_ADD, 'rule.create'),
            (AuditLogEntryEvent.RULE_EDIT, 'rule.edit'),
            (AuditLogEntryEvent.RULE_REMOVE, 'rule.remove'),
            (AuditLogEntryEvent.SET_ONDEMAND, 'ondemand.edit'),
            # FIX: labels previously misspelled 'serivcehook.*'
            (AuditLogEntryEvent.SERVICEHOOK_ADD, 'servicehook.create'),
            (AuditLogEntryEvent.SERVICEHOOK_EDIT, 'servicehook.edit'),
            (AuditLogEntryEvent.SERVICEHOOK_REMOVE, 'servicehook.remove'),
            (AuditLogEntryEvent.SERVICEHOOK_ENABLE, 'servicehook.enable'),
            (AuditLogEntryEvent.SERVICEHOOK_DISABLE, 'servicehook.disable'),
        )
    )
    ip_address = models.GenericIPAddressField(null=True, unpack_ipv4=True)
    data = GzippedDictField()
    datetime = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_auditlogentry'

    # NOTE(review): there is no `type` field on this model (the event column
    # is named `event`) — presumably sane_repr tolerates missing attributes;
    # verify or change to 'event'.
    __repr__ = sane_repr('organization_id', 'type')

    def save(self, *args, **kwargs):
        # Denormalize a human-readable actor label; one of actor/actor_key
        # must be present.
        if not self.actor_label:
            assert self.actor or self.actor_key
            if self.actor:
                self.actor_label = self.actor.username
            else:
                self.actor_label = self.actor_key.key
        super(AuditLogEntry, self).save(*args, **kwargs)

    def get_actor_name(self):
        # Prefer the live actor/key; fall back to the denormalized label
        # (e.g. when the user was deleted).
        if self.actor:
            return self.actor.get_display_name()
        elif self.actor_key:
            return self.actor_key.key + ' (api key)'
        return self.actor_label

    def get_note(self):
        """
        Return a human-readable sentence describing this entry, or '' for
        unknown events.  May raise KeyError if ``data`` lacks a key the
        event's template expects.
        """
        if self.event == AuditLogEntryEvent.MEMBER_INVITE:
            return 'invited member %s' % (self.data['email'], )
        elif self.event == AuditLogEntryEvent.MEMBER_ADD:
            if self.target_user == self.actor:
                return 'joined the organization'
            return 'added member %s' % (self.target_user.get_display_name(), )
        elif self.event == AuditLogEntryEvent.MEMBER_ACCEPT:
            return 'accepted the membership invite'
        elif self.event == AuditLogEntryEvent.MEMBER_REMOVE:
            if self.target_user == self.actor:
                return 'left the organization'
            return 'removed member %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
            )
        elif self.event == AuditLogEntryEvent.MEMBER_EDIT:
            return 'edited member %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
            )
        elif self.event == AuditLogEntryEvent.MEMBER_JOIN_TEAM:
            if self.target_user == self.actor:
                return 'joined team %s' % (self.data['team_slug'], )
            return 'added %s to team %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
                self.data['team_slug'],
            )
        elif self.event == AuditLogEntryEvent.MEMBER_LEAVE_TEAM:
            if self.target_user == self.actor:
                return 'left team %s' % (self.data['team_slug'], )
            return 'removed %s from team %s' % (
                self.data.get('email') or self.target_user.get_display_name(),
                self.data['team_slug'],
            )
        elif self.event == AuditLogEntryEvent.ORG_ADD:
            return 'created the organization'
        elif self.event == AuditLogEntryEvent.ORG_EDIT:
            return 'edited the organization setting(s): ' + (
                ', '.join(u'{} {}'.format(k, v) for k, v in self.data.items()))
        elif self.event == AuditLogEntryEvent.ORG_REMOVE:
            return 'removed the organization'
        elif self.event == AuditLogEntryEvent.ORG_RESTORE:
            return 'restored the organization'
        elif self.event == AuditLogEntryEvent.TEAM_ADD:
            return 'created team %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.TEAM_EDIT:
            return 'edited team %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.TEAM_REMOVE:
            return 'removed team %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_ADD:
            return 'created project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_EDIT:
            return 'edited project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_REMOVE:
            return 'removed project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_REQUEST_TRANSFER:
            return 'requested to transfer project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.PROJECT_ACCEPT_TRANSFER:
            return 'accepted transfer of project %s' % (self.data['slug'], )
        elif self.event == AuditLogEntryEvent.TAGKEY_REMOVE:
            return 'removed tags matching %s = *' % (self.data['key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_ADD:
            return 'added project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_EDIT:
            return 'edited project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_REMOVE:
            return 'removed project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_ENABLE:
            return 'enabled project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.PROJECTKEY_DISABLE:
            return 'disabled project key %s' % (self.data['public_key'], )
        elif self.event == AuditLogEntryEvent.SSO_ENABLE:
            return 'enabled sso (%s)' % (self.data['provider'], )
        elif self.event == AuditLogEntryEvent.SSO_DISABLE:
            return 'disabled sso (%s)' % (self.data['provider'], )
        elif self.event == AuditLogEntryEvent.SSO_EDIT:
            return 'edited sso settings'
        elif self.event == AuditLogEntryEvent.SSO_IDENTITY_LINK:
            return 'linked their account to a new identity'
        elif self.event == AuditLogEntryEvent.APIKEY_ADD:
            return 'added api key %s' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.APIKEY_EDIT:
            return 'edited api key %s' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.APIKEY_REMOVE:
            return 'removed api key %s' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.RULE_ADD:
            return 'added rule "%s"' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.RULE_EDIT:
            return 'edited rule "%s"' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.RULE_REMOVE:
            return 'removed rule "%s"' % (self.data['label'], )
        elif self.event == AuditLogEntryEvent.SET_ONDEMAND:
            # ondemand is stored in cents
            return 'changed on-demand max spend to $%d' % (self.data['ondemand'] / 100, )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_ADD:
            return 'added a service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_EDIT:
            return 'edited the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_REMOVE:
            return 'removed the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_ENABLE:
            # FIX: was 'enabled theservice hook' (missing space)
            return 'enabled the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        elif self.event == AuditLogEntryEvent.SERVICEHOOK_DISABLE:
            return 'disabled the service hook for "%s"' % (truncatechars(self.data['url'], 64), )
        return ''
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """

    __include_in_export__ = False

    project = FlexibleForeignKey("sentry.Project")
    logger = models.CharField(max_length=64, blank=True, default=str(DEFAULT_LOGGER_NAME), db_index=True)
    level = BoundedPositiveIntegerField(
        choices=[(key, str(val)) for key, val in sorted(LOG_LEVELS.items())],
        default=logging.ERROR,
        blank=True,
        db_index=True,
    )
    # First line of the event message, truncated to 255 chars in save().
    message = models.TextField()
    culprit = models.CharField(max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column="view")
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (GroupStatus.UNRESOLVED, _("Unresolved")),
            (GroupStatus.RESOLVED, _("Resolved")),
            (GroupStatus.IGNORED, _("Ignored")),
        ),
        db_index=True,
    )
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey("sentry.Release", null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    # Materialized via calculate_score() on every save().
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager(cache_fields=("id", ))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_groupedmessage"
        verbose_name_plural = _("grouped messages")
        verbose_name = _("grouped message")
        permissions = (("can_view", "Can view"), )
        index_together = [
            ("project", "first_release"),
            ("project", "id"),
            ("project", "status", "last_seen", "id"),
        ]
        unique_together = (
            ("project", "short_id"),
            ("project", "id"),
        )

    __repr__ = sane_repr("project_id")

    def __str__(self):
        return f"({self.times_seen}) {self.error()}"

    def save(self, *args, **kwargs):
        """Backfill timestamp defaults, normalize the message and refresh
        the materialized score before persisting."""
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        if self.times_seen is None:
            self.times_seen = 1
        self.score = type(self).calculate_score(times_seen=self.times_seen, last_seen=self.last_seen)
        super().save(*args, **kwargs)

    def get_absolute_url(
        self,
        params: Optional[Mapping[str, str]] = None,
        event_id: Optional[int] = None,
        organization_slug: Optional[str] = None,
    ) -> str:
        """Build the issue (optionally issue-event) URL for this group."""
        # Built manually in preference to django.urls.reverse,
        # because reverse has a measured performance impact.
        event_path = f"events/{event_id}/" if event_id else ""
        url = "organizations/{org}/issues/{id}/{event_path}{params}".format(
            # Pass organization_slug if this needs to be called multiple times to avoid n+1 queries
            org=urlquote(self.organization.slug if organization_slug is None else organization_slug),
            id=self.id,
            event_path=event_path,
            params="?" + urlencode(params) if params else "",
        )
        return absolute_uri(url)

    @property
    def qualified_short_id(self):
        # e.g. "MYPROJECT-ABC"; None when no short_id has been assigned.
        if self.short_id is not None:
            return f"{self.project.slug.upper()}-{base32_encode(self.short_id)}"

    def is_over_resolve_age(self):
        # True when the project's auto-resolve window (hours) has elapsed
        # since last_seen; False when the option is unset/zero.
        resolve_age = self.project.get_option("sentry:resolve_age", None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    def is_unresolved(self):
        return self.get_status() == GroupStatus.UNRESOLVED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Return the effective status, accounting for expired snoozes and
        the project's auto-resolve age."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status

        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get_from_cache(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                # An invalid (expired) snooze flips the group back to unresolved.
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        from sentry.models import GroupShare

        try:
            return GroupShare.objects.filter(group_id=self.id).values_list(
                "uuid", flat=True)[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    @classmethod
    def from_share_id(cls, share_id):
        # Share ids are 32-char uuid hex strings; anything else cannot match.
        if not share_id or len(share_id) != 32:
            raise cls.DoesNotExist

        from sentry.models import GroupShare

        return cls.objects.get(id__in=GroupShare.objects.filter(
            uuid=share_id).values_list("group_id")[:1])

    def get_score(self):
        return type(self).calculate_score(self.times_seen, self.last_seen)

    def get_latest_event(self) -> Optional[Event]:
        # Memoized on the instance; environments are not filtered here.
        if not hasattr(self, "_latest_event"):
            self._latest_event = self.get_latest_event_for_environments()
        return self._latest_event

    def get_latest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.LATEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def get_oldest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.OLDEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def _get_cache_key(self, project_id, group_id, first):
        return f"g-r:{group_id}-{project_id}-{first}"

    def __get_release(self, project_id, group_id, first=True, use_cache=True):
        """Return the first- or last-seen release version for this group,
        caching results (including misses, stored as False) for an hour."""
        from sentry.models import GroupRelease, Release

        orderby = "first_seen" if first else "-last_seen"
        cache_key = self._get_cache_key(project_id, group_id, first)
        try:
            release_version = cache.get(cache_key) if use_cache else None
            if release_version is None:
                release_version = Release.objects.get(
                    id__in=GroupRelease.objects.filter(group_id=group_id).
                    order_by(orderby).values("release_id")[:1]).version
                cache.set(cache_key, release_version, 3600)
            elif release_version is False:
                # Cached negative result: no release known for this group.
                release_version = None
            return release_version
        except Release.DoesNotExist:
            cache.set(cache_key, False, 3600)
            return None

    def get_first_release(self):
        if self.first_release_id is None:
            first_release = self.__get_release(self.project_id, self.id, True)
            return first_release

        return self.first_release.version

    def get_last_release(self, use_cache=True):
        return self.__get_release(self.project_id, self.id, False, use_cache=use_cache)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get("type", "default")

    def get_event_metadata(self) -> Mapping[str, str]:
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data["metadata"]

    @property
    def title(self) -> str:
        et = eventtypes.get(self.get_event_type())()
        return et.get_title(self.get_event_metadata())

    def location(self):
        et = eventtypes.get(self.get_event_type())()
        return et.get_location(self.get_event_metadata())

    def error(self):
        warnings.warn("Group.error is deprecated, use Group.title", DeprecationWarning)
        return self.title

    error.short_description = _("error")

    @property
    def message_short(self):
        warnings.warn("Group.message_short is deprecated, use Group.title", DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def checksum(self):
        warnings.warn("Group.checksum is no longer used", DeprecationWarning)
        return ""

    def get_email_subject(self):
        return f"{self.qualified_short_id} - {self.title}"

    def count_users_seen(self):
        return tagstore.get_groups_user_counts([self.project_id], [self.id],
                                               environment_ids=None,
                                               start=self.first_seen)[self.id]

    @classmethod
    def calculate_score(cls, times_seen, last_seen):
        # Log-weighted frequency plus a recency term (epoch seconds).
        # NOTE(review): strftime("%s") is a platform-specific extension --
        # presumably safe on the deployed OS, but not portable.
        return math.log(float(times_seen or 1)) * 600 + float(
            last_seen.strftime("%s"))

    @staticmethod
    def issues_mapping(group_ids, project_ids, organization):
        """Create a dictionary of group_ids to their qualified_short_ids"""
        return {
            i.id: i.qualified_short_id
            for i in Group.objects.filter(id__in=group_ids,
                                          project_id__in=project_ids,
                                          project__organization=organization)
        }
class FileBlob(Model):
    """A deduplicated chunk of file content, addressed by SHA-1 checksum."""
    __core__ = False

    storage = models.CharField(max_length=128)
    storage_options = JSONField()
    path = models.TextField(null=True)
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, unique=True)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_fileblob'

    @classmethod
    def from_file(cls, fileobj):
        """
        Retrieve a FileBlob instance for the given file.

        If not already present, this will cause it to be stored.

        >>> blob = FileBlob.from_file(fileobj)
        """
        size = 0
        # BUG FIX: sha1('') fails on Python 3 (str not accepted); b'' works
        # identically on Python 2 and 3.
        checksum = sha1(b'')
        for chunk in fileobj:
            size += len(chunk)
            checksum.update(chunk)
        checksum = checksum.hexdigest()

        lock_key = 'fileblob:upload:{}'.format(checksum)
        # TODO(dcramer): the database here is safe, but if this lock expires
        # and duplicate files are uploaded then we need to prune one
        with Lock(lock_key, timeout=600):
            # test for presence
            try:
                existing = FileBlob.objects.get(checksum=checksum)
            except FileBlob.DoesNotExist:
                pass
            else:
                return existing

            blob = cls(
                size=size,
                checksum=checksum,
                storage=settings.SENTRY_FILESTORE,
                storage_options=settings.SENTRY_FILESTORE_OPTIONS,
            )

            blob.path = cls.generate_unique_path(blob.timestamp)

            storage = blob.get_storage()
            storage.save(blob.path, fileobj)
            blob.save()

        metrics.timing('filestore.blob-size', blob.size)
        return blob

    @classmethod
    def generate_unique_path(cls, timestamp):
        """Build a '<day>/<seconds-within-day>/<uuid>' storage path."""
        # BUG FIX: this used map(str, ...) followed by .append(); on
        # Python 3 map() returns an iterator which has no append().
        pieces = [str(piece) for piece in divmod(int(timestamp.strftime('%s')), ONE_DAY)]
        pieces.append('%s' % (uuid4().hex, ))
        return '/'.join(pieces)

    def delete(self, *args, **kwargs):
        # Remove the backing file before deleting the row.
        if self.path:
            self.deletefile(commit=False)
        super(FileBlob, self).delete(*args, **kwargs)

    def get_storage(self):
        """Instantiate the storage backend recorded on this row."""
        backend = self.storage
        options = self.storage_options
        storage = get_storage_class(backend)
        return storage(**options)

    def deletefile(self, commit=False):
        """Delete the stored content and clear ``path``; optionally save."""
        assert self.path

        storage = self.get_storage()
        storage.delete(self.path)

        self.path = None

        if commit:
            self.save()

    def getfile(self):
        """
        Return a file-like object for this File's content.

        >>> with blob.getfile() as src, open('/tmp/localfile', 'wb') as dst:
        >>>     for chunk in src.chunks():
        >>>         dst.write(chunk)
        """
        assert self.path

        storage = self.get_storage()
        return storage.open(self.path)
class File(Model):
    """A logical file assembled from one or more FileBlob chunks."""
    __core__ = False

    name = models.TextField()
    type = models.CharField(max_length=64)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    headers = JSONField()
    blobs = models.ManyToManyField("sentry.FileBlob", through="sentry.FileBlobIndex")
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, null=True, db_index=True)

    # <Legacy fields>
    # Remove in 8.1
    blob = FlexibleForeignKey("sentry.FileBlob", null=True, related_name="legacy_blob")
    path = models.TextField(null=True)
    # </Legacy fields>

    class Meta:
        app_label = "sentry"
        db_table = "sentry_file"

    def _get_chunked_blob(self, mode=None, prefetch=False, prefetch_to=None, delete=True):
        # Wrap this file's ordered blob indexes in a file-like reader.
        return ChunkedFileBlobIndexWrapper(
            FileBlobIndex.objects.filter(
                file=self).select_related("blob").order_by("offset"),
            mode=mode,
            prefetch=prefetch,
            prefetch_to=prefetch_to,
            delete=delete,
        )

    def getfile(self, mode=None, prefetch=False):
        """Returns a file object.  By default the file is fetched on
        demand but if prefetch is enabled the file is fully prefetched
        into a tempfile before reading can happen.
        """
        impl = self._get_chunked_blob(mode, prefetch)
        return FileObj(impl, self.name)

    def save_to(self, path):
        """Fetches the file and emplaces it at a certain location.  The
        write is done atomically to a tempfile first and then moved over.
        If the directory does not exist it is created.
        """
        path = os.path.abspath(path)
        base = os.path.dirname(path)
        try:
            os.makedirs(base)
        except OSError:
            pass

        f = None
        try:
            f = self._get_chunked_blob(prefetch=True,
                                       prefetch_to=base,
                                       delete=False).detach_tempfile()

            # pre-emptively check if the file already exists.
            # this can happen as a race condition if two processes/threads
            # are trying to cache the same file and both try to write
            # at the same time, overwriting each other. Normally this is fine,
            # but can cause an issue if another process has opened the file
            # for reading, then the file that was being read gets clobbered.
            # I don't know if this affects normal filesystems, but it
            # definitely has an issue if the filesystem is NFS.
            if not os.path.exists(path):
                os.rename(f.name, path)
                # Ownership of the tempfile moved to `path`; mark it consumed
                # so the cleanup below leaves it alone.
                f.close()
                f = None
        finally:
            # BUG FIX: previously, when `path` already existed, the tempfile
            # was closed and then forgotten, leaking it on disk. Any tempfile
            # not consumed by the rename is now always removed here.
            if f is not None:
                f.close()
                try:
                    os.remove(f.name)
                except Exception:
                    pass

    def putfile(self, fileobj, blob_size=DEFAULT_BLOB_SIZE, commit=True, logger=nooplogger):
        """
        Save a fileobj into a number of chunks.

        Returns a list of `FileBlobIndex` items.

        >>> indexes = file.putfile(fileobj)
        """
        results = []
        offset = 0
        checksum = sha1(b"")

        while True:
            contents = fileobj.read(blob_size)
            if not contents:
                break
            checksum.update(contents)

            blob_fileobj = ContentFile(contents)
            blob = FileBlob.from_file(blob_fileobj, logger=logger)

            results.append(
                FileBlobIndex.objects.create(file=self, blob=blob, offset=offset))
            offset += blob.size
        self.size = offset
        self.checksum = checksum.hexdigest()
        metrics.timing("filestore.file-size", offset)
        if commit:
            self.save()
        return results

    def assemble_from_file_blob_ids(self, file_blob_ids, checksum, commit=True):
        """
        This creates a file, from file blobs and returns a temp file with the
        contents.
        """
        tf = tempfile.NamedTemporaryFile()
        with transaction.atomic():
            file_blobs = FileBlob.objects.filter(id__in=file_blob_ids).all()

            # Ensure blobs are in the order and duplication as provided
            blobs_by_id = {blob.id: blob for blob in file_blobs}
            file_blobs = [blobs_by_id[blob_id] for blob_id in file_blob_ids]

            new_checksum = sha1(b"")
            offset = 0
            for blob in file_blobs:
                FileBlobIndex.objects.create(file=self, blob=blob, offset=offset)
                for chunk in blob.getfile().chunks():
                    new_checksum.update(chunk)
                    tf.write(chunk)
                offset += blob.size

            self.size = offset
            self.checksum = new_checksum.hexdigest()

            if checksum != self.checksum:
                raise AssembleChecksumMismatch("Checksum mismatch")

        metrics.timing("filestore.file-size", offset)
        if commit:
            self.save()
        tf.flush()
        tf.seek(0)
        return tf

    def delete(self, *args, **kwargs):
        blob_ids = [blob.id for blob in self.blobs.all()]
        super().delete(*args, **kwargs)

        # Wait to delete blobs. This helps prevent
        # races around frequently used blobs in debug images and release files.
        transaction.on_commit(lambda: delete_unreferenced_blobs.apply_async(
            kwargs={"blob_ids": blob_ids}, countdown=60 * 5))
class Integration(Model):
    """A globally-shared third-party integration instance; per-organization
    state lives on the OrganizationIntegration through-table."""
    __core__ = False

    organizations = models.ManyToManyField("sentry.Organization",
                                           related_name="integrations",
                                           through=OrganizationIntegration)
    projects = models.ManyToManyField("sentry.Project",
                                      related_name="integrations",
                                      through=ProjectIntegration)
    provider = models.CharField(max_length=64)
    external_id = models.CharField(max_length=64)
    name = models.CharField(max_length=200)
    # metadata might be used to store things like credentials, but it should NOT
    # be used to store organization-specific information, as the Integration
    # instance is shared among multiple organizations
    metadata = EncryptedJsonField(default=dict)
    status = BoundedPositiveIntegerField(default=ObjectStatus.VISIBLE,
                                         choices=ObjectStatus.as_choices(),
                                         null=True)
    date_added = models.DateTimeField(default=timezone.now, null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_integration"
        unique_together = (("provider", "external_id"), )

    def get_provider(self):
        """Resolve the registered provider implementation for this row."""
        from sentry import integrations

        return integrations.get(self.provider)

    def get_installation(self, organization_id, **kwargs):
        """Return the provider's installation wrapper scoped to one organization."""
        return self.get_provider().get_installation(self, organization_id, **kwargs)

    def has_feature(self, feature):
        # Membership test against the provider's declared feature set.
        return feature in self.get_provider().features

    def add_organization(self, organization, user=None, default_auth_id=None):
        """
        Add an organization to this integration.

        Returns False if the OrganizationIntegration was not created
        (i.e. on IntegrityError); otherwise returns the (new or existing)
        OrganizationIntegration and emits the `integration_added` signal.
        """
        try:
            org_integration, created = OrganizationIntegration.objects.get_or_create(
                organization_id=organization.id,
                integration_id=self.id,
                defaults={
                    "default_auth_id": default_auth_id,
                    "config": {}
                },
            )
            # Refresh the default auth on an already-linked org when provided.
            if not created and default_auth_id:
                org_integration.update(default_auth_id=default_auth_id)
        except IntegrityError:
            return False
        else:
            integration_added.send_robust(integration=self,
                                          organization=organization,
                                          user=user,
                                          sender=self.__class__)

            return org_integration
class FileBlob(Model):
    """A deduplicated chunk of file content addressed by SHA-1 checksum."""
    __core__ = False

    path = models.TextField(null=True)
    size = BoundedPositiveIntegerField(null=True)
    checksum = models.CharField(max_length=40, unique=True)
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_fileblob"

    @classmethod
    def from_files(cls, files, organization=None, logger=nooplogger):
        """A faster version of `from_file` for multiple files at the time.
        If an organization is provided it will also create `FileBlobOwner`
        entries.  Files can be a list of files or tuples of file and checksum.
        If both are provided then a checksum check is performed.

        If the checksums mismatch an `IOError` is raised.
        """
        logger.debug("FileBlob.from_files.start")

        files_with_checksums = []
        for fileobj in files:
            if isinstance(fileobj, tuple):
                files_with_checksums.append(fileobj)
            else:
                files_with_checksums.append((fileobj, None))

        checksums_seen = set()
        blobs_created = []
        blobs_to_save = []
        # NOTE: this local `locks` set shadows the module-level lock manager
        # inside this method and its closures.
        locks = set()
        semaphore = Semaphore(value=MULTI_BLOB_UPLOAD_CONCURRENCY)

        def _upload_and_pend_chunk(fileobj, size, checksum, lock):
            # Upload one chunk to storage and queue its row (still holding
            # `lock`) for _flush_blobs to persist on the caller thread.
            logger.debug(
                "FileBlob.from_files._upload_and_pend_chunk.start",
                extra={
                    "checksum": checksum,
                    "size": size
                },
            )
            blob = cls(size=size, checksum=checksum)
            blob.path = cls.generate_unique_path()
            storage = get_storage()
            storage.save(blob.path, fileobj)
            blobs_to_save.append((blob, lock))
            metrics.timing("filestore.blob-size", size, tags={"function": "from_files"})
            logger.debug(
                "FileBlob.from_files._upload_and_pend_chunk.end",
                extra={
                    "checksum": checksum,
                    "path": blob.path
                },
            )

        def _ensure_blob_owned(blob):
            # Record org ownership; duplicates are expected and ignored.
            if organization is None:
                return
            try:
                with transaction.atomic():
                    FileBlobOwner.objects.create(organization=organization, blob=blob)
            except IntegrityError:
                pass

        def _save_blob(blob):
            logger.debug("FileBlob.from_files._save_blob.start", extra={"path": blob.path})
            blob.save()
            _ensure_blob_owned(blob)
            logger.debug("FileBlob.from_files._save_blob.end", extra={"path": blob.path})

        def _flush_blobs():
            # Drain all uploaded-but-unsaved blobs, releasing their locks and
            # freeing semaphore slots for more uploads.
            while True:
                try:
                    blob, lock = blobs_to_save.pop()
                except IndexError:
                    break

                _save_blob(blob)
                lock.__exit__(None, None, None)
                locks.discard(lock)
                semaphore.release()

        try:
            with ThreadPoolExecutor(
                    max_workers=MULTI_BLOB_UPLOAD_CONCURRENCY) as exe:
                for fileobj, reference_checksum in files_with_checksums:
                    logger.debug("FileBlob.from_files.executor_start",
                                 extra={"checksum": reference_checksum})
                    _flush_blobs()

                    # Before we go and do something with the files we calculate
                    # the checksums and compare it against the reference. This
                    # also deduplicates duplicates uploaded in the same request.
                    # This is necessary because we acquire multiple locks in one
                    # go which would let us deadlock otherwise.
                    size, checksum = _get_size_and_checksum(fileobj)
                    if reference_checksum is not None and checksum != reference_checksum:
                        raise OSError("Checksum mismatch")
                    if checksum in checksums_seen:
                        continue
                    checksums_seen.add(checksum)

                    # Check if we need to lock the blob. If we get a result back
                    # here it means the blob already exists.
                    lock = _locked_blob(checksum, logger=logger)
                    existing = lock.__enter__()
                    if existing is not None:
                        lock.__exit__(None, None, None)
                        blobs_created.append(existing)
                        _ensure_blob_owned(existing)
                        continue

                    # Remember the lock to force unlock all at the end if we
                    # encounter any difficulties.
                    locks.add(lock)

                    # Otherwise we leave the blob locked and submit the task.
                    # We use the semaphore to ensure we never schedule too
                    # many. The upload will be done with a certain amount
                    # of concurrency controlled by the semaphore and the
                    # `_flush_blobs` call will take all those uploaded
                    # blobs and associate them with the database.
                    semaphore.acquire()
                    # BUG FIX: previously this was
                    # exe.submit(_upload_and_pend_chunk(fileobj, size, checksum, lock))
                    # which invoked the upload synchronously on this thread and
                    # then submitted its None return value as the "callable",
                    # defeating the thread pool entirely.
                    exe.submit(_upload_and_pend_chunk, fileobj, size, checksum, lock)
                    logger.debug("FileBlob.from_files.end",
                                 extra={"checksum": reference_checksum})

            _flush_blobs()
        finally:
            # Force-release any locks still held after a failure.
            for lock in locks:
                try:
                    lock.__exit__(None, None, None)
                except Exception:
                    pass
            logger.debug("FileBlob.from_files.end")

    @classmethod
    def from_file(cls, fileobj, logger=nooplogger):
        """
        Retrieve a single FileBlob instances for the given file.
        """
        logger.debug("FileBlob.from_file.start")

        size, checksum = _get_size_and_checksum(fileobj)

        # TODO(dcramer): the database here is safe, but if this lock expires
        # and duplicate files are uploaded then we need to prune one
        with _locked_blob(checksum, logger=logger) as existing:
            if existing is not None:
                return existing

            blob = cls(size=size, checksum=checksum)
            blob.path = cls.generate_unique_path()
            storage = get_storage()
            storage.save(blob.path, fileobj)
            blob.save()

        metrics.timing("filestore.blob-size", size)
        logger.debug("FileBlob.from_file.end")
        return blob

    @classmethod
    def generate_unique_path(cls):
        # We intentionally do not use checksums as path names to avoid concurrency issues
        # when we attempt concurrent uploads for any reason.
        uuid_hex = uuid4().hex
        pieces = [uuid_hex[:2], uuid_hex[2:6], uuid_hex[6:]]
        return "/".join(pieces)

    def delete(self, *args, **kwargs):
        if self.path:
            self.deletefile(commit=False)
        # `locks` here is the module-level lock manager (not the local set in
        # from_files).
        lock = locks.get(f"fileblob:upload:{self.checksum}", duration=UPLOAD_RETRY_TIME)
        with TimedRetryPolicy(UPLOAD_RETRY_TIME, metric_instance="lock.fileblob.delete")(
                lock.acquire):
            super().delete(*args, **kwargs)

    def deletefile(self, commit=False):
        assert self.path

        # Defer this by 1 minute just to make sure
        # we avoid any transaction isolation where the
        # FileBlob row might still be visible by the
        # task before transaction is committed.
        delete_file_task.apply_async(kwargs={
            "path": self.path,
            "checksum": self.checksum
        }, countdown=60)

        self.path = None

        if commit:
            self.save()

    def getfile(self):
        """
        Return a file-like object for this File's content.

        >>> with blob.getfile() as src, open('/tmp/localfile', 'wb') as dst:
        >>>     for chunk in src.chunks():
        >>>         dst.write(chunk)
        """
        assert self.path

        storage = get_storage()
        return storage.open(self.path)
class Organization(Model):
    """
    An organization represents a group of individuals which maintain ownership of projects.
    """
    __core__ = True

    name = models.CharField(max_length=64)
    slug = models.SlugField(unique=True)
    status = BoundedPositiveIntegerField(
        choices=OrganizationStatus.as_choices(),
        # south will generate a default value of `'<OrganizationStatus.ACTIVE: 0>'`
        # if `.value` is omitted
        default=OrganizationStatus.ACTIVE.value
    )
    date_added = models.DateTimeField(default=timezone.now)
    members = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        through='sentry.OrganizationMember',
        related_name='org_memberships'
    )
    default_role = models.CharField(
        choices=roles.get_choices(),
        max_length=32,
        default=roles.get_default().id,
    )

    flags = BitField(
        flags=(
            (
                'allow_joinleave',
                'Allow members to join and leave teams without requiring approval.'
            ),
            (
                'enhanced_privacy',
                'Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.'
            ),
            (
                'disable_shared_issues',
                'Disable sharing of limited details on issues to anonymous users.'
            ),
            (
                'early_adopter',
                'Enable early adopter status, gaining access to features prior to public release.'
            ),
            (
                'require_2fa',
                'Require and enforce two-factor authentication for all members.'
            ),
            (
                'disable_new_visibility_features',
                'Temporarily opt out of new visibility features and ui',
            ),
        ),
        default=1
    )

    objects = OrganizationManager(cache_fields=('pk', 'slug', ))

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organization'

    __repr__ = sane_repr('owner_id', 'name', 'slug')

    @classmethod
    def get_default(cls):
        """
        Return the organization used in single organization mode.
        """
        return cls.objects.filter(
            status=OrganizationStatus.ACTIVE,
        )[0]

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.slug)

    def save(self, *args, **kwargs):
        # Generate a unique slug under a distributed lock on first save.
        if not self.slug:
            lock = locks.get('slug:organization', duration=5)
            with TimedRetryPolicy(10)(lock.acquire):
                slugify_instance(self, self.name, reserved=RESERVED_ORGANIZATION_SLUGS)
            super(Organization, self).save(*args, **kwargs)
        else:
            super(Organization, self).save(*args, **kwargs)

    def delete(self):
        if self.is_default:
            # BUG FIX: message previously read 'delete the the default'.
            raise Exception('You cannot delete the default organization.')
        return super(Organization, self).delete()

    @cached_property
    def is_default(self):
        if not settings.SENTRY_SINGLE_ORGANIZATION:
            return False

        return self == type(self).get_default()

    def has_access(self, user, access=None):
        queryset = self.member_set.filter(user=user)
        if access is not None:
            queryset = queryset.filter(type__lte=access)

        return queryset.exists()

    def get_audit_log_data(self):
        return {
            'id': self.id,
            'slug': self.slug,
            'name': self.name,
            'status': int(self.status),
            'flags': int(self.flags),
            'default_role': self.default_role,
        }

    def get_owners(self):
        from sentry.models import User
        return User.objects.filter(
            sentry_orgmember_set__role=roles.get_top_dog().id,
            sentry_orgmember_set__organization=self,
            is_active=True,
        )

    def get_default_owner(self):
        if not hasattr(self, '_default_owner'):
            self._default_owner = self.get_owners()[0]
        return self._default_owner

    def has_single_owner(self):
        from sentry.models import OrganizationMember
        count = OrganizationMember.objects.filter(
            organization=self,
            role=roles.get_top_dog().id,
            user__isnull=False,
            user__is_active=True,
        )[:2].count()
        return count == 1

    def merge_to(from_org, to_org):
        # NOTE: behaves like an instance method on `from_org` despite the
        # unconventional first-parameter name. Migrates members, teams,
        # projects, releases and related rows from one org into another.
        from sentry.models import (
            ApiKey, AuditLogEntry, AuthProvider, Commit, OrganizationAvatar,
            OrganizationIntegration, OrganizationMember, OrganizationMemberTeam,
            Project, Release, ReleaseCommit, ReleaseEnvironment, ReleaseFile,
            ReleaseHeadCommit, Repository, Team, Environment,
        )

        # BUG FIX: this assignment previously lived inside the member loop,
        # so an organization with no members raised NameError at the first
        # logger.info() after the loop.
        logger = logging.getLogger('sentry.merge')

        for from_member in OrganizationMember.objects.filter(
            organization=from_org, user__isnull=False
        ):
            try:
                to_member = OrganizationMember.objects.get(
                    organization=to_org,
                    user=from_member.user,
                )
            except OrganizationMember.DoesNotExist:
                from_member.update(organization=to_org)
                to_member = from_member
            else:
                qs = OrganizationMemberTeam.objects.filter(
                    organizationmember=from_member,
                    is_active=True,
                ).select_related()
                for omt in qs:
                    OrganizationMemberTeam.objects.create_or_update(
                        organizationmember=to_member,
                        team=omt.team,
                        defaults={
                            'is_active': True,
                        },
                    )
            logger.info('user.migrate', extra={
                'instance_id': from_member.id,
                'new_member_id': to_member.id,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        for from_team in Team.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    from_team.update(organization=to_org)
            except IntegrityError:
                # Slug collision in the target org: re-slugify, then move.
                slugify_instance(from_team, from_team.name, organization=to_org)
                from_team.update(
                    organization=to_org,
                    slug=from_team.slug,
                )
            logger.info('team.migrate', extra={
                'instance_id': from_team.id,
                'new_slug': from_team.slug,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        for from_project in Project.objects.filter(organization=from_org):
            try:
                with transaction.atomic():
                    from_project.update(organization=to_org)
            except IntegrityError:
                slugify_instance(
                    from_project,
                    from_project.name,
                    organization=to_org,
                    reserved=RESERVED_PROJECT_SLUGS)
                from_project.update(
                    organization=to_org,
                    slug=from_project.slug,
                )
            logger.info('project.migrate', extra={
                'instance_id': from_project.id,
                'new_slug': from_project.slug,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        # TODO(jess): update this when adding unique constraint
        # on version, organization for releases
        for from_release in Release.objects.filter(organization=from_org):
            try:
                to_release = Release.objects.get(version=from_release.version, organization=to_org)
            except Release.DoesNotExist:
                Release.objects.filter(id=from_release.id).update(organization=to_org)
            else:
                Release.merge(to_release, [from_release])
            logger.info('release.migrate', extra={
                'instance_id': from_release.id,
                'from_organization_id': from_org.id,
                'to_organization_id': to_org.id,
            })

        def do_update(queryset, params):
            # Bulk-update first; on a constraint violation fall back to
            # per-row updates, skipping only the conflicting rows.
            model_name = queryset.model.__name__.lower()
            try:
                with transaction.atomic():
                    queryset.update(**params)
            except IntegrityError:
                for instance in queryset:
                    try:
                        with transaction.atomic():
                            instance.update(**params)
                    except IntegrityError:
                        logger.info('{}.migrate-skipped'.format(model_name), extra={
                            'from_organization_id': from_org.id,
                            'to_organization_id': to_org.id,
                        })
                    else:
                        logger.info('{}.migrate'.format(model_name), extra={
                            'instance_id': instance.id,
                            'from_organization_id': from_org.id,
                            'to_organization_id': to_org.id,
                        })
            else:
                logger.info('{}.migrate'.format(model_name), extra={
                    'from_organization_id': from_org.id,
                    'to_organization_id': to_org.id,
                })

        # Models related via an `organization` FK vs. a raw `organization_id`.
        INST_MODEL_LIST = (
            AuthProvider, ApiKey, AuditLogEntry, OrganizationAvatar,
            OrganizationIntegration, ReleaseEnvironment, ReleaseFile,
        )
        ATTR_MODEL_LIST = (
            Commit, ReleaseCommit, ReleaseHeadCommit, Repository, Environment,
        )

        for model in INST_MODEL_LIST:
            queryset = model.objects.filter(
                organization=from_org,
            )
            do_update(queryset, {'organization': to_org})

        for model in ATTR_MODEL_LIST:
            queryset = model.objects.filter(
                organization_id=from_org.id,
            )
            do_update(queryset, {'organization_id': to_org.id})

    # TODO: Make these a mixin
    def update_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.set_value(self, *args, **kwargs)

    def get_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.get_value(self, *args, **kwargs)

    def delete_option(self, *args, **kwargs):
        from sentry.models import OrganizationOption

        return OrganizationOption.objects.unset_value(self, *args, **kwargs)

    def send_delete_confirmation(self, audit_log_entry, countdown):
        """Email every owner a deletion notice with a restore link."""
        from sentry import options
        from sentry.utils.email import MessageBuilder

        owners = self.get_owners()

        context = {
            'organization': self,
            'audit_log_entry': audit_log_entry,
            'eta': timezone.now() + timedelta(seconds=countdown),
            'url': absolute_uri(reverse(
                'sentry-restore-organization',
                args=[self.slug],
            )),
        }

        MessageBuilder(
            subject='%sOrganization Queued for Deletion' % (options.get('mail.subject-prefix'), ),
            template='sentry/emails/org_delete_confirm.txt',
            html_template='sentry/emails/org_delete_confirm.html',
            type='org.confirm_delete',
            context=context,
        ).send_async([o.email for o in owners])

    def flag_has_changed(self, flag_name):
        "Returns ``True`` if ``flag`` has changed since initialization."
        return getattr(self.old_value('flags'), flag_name, None) != getattr(self.flags, flag_name)

    def handle_2fa_required(self, request):
        """Kick off async removal of members who do not comply with 2FA."""
        from sentry.models import ApiKey
        from sentry.tasks.auth import remove_2fa_non_compliant_members

        actor_id = request.user.id if request.user and request.user.is_authenticated() else None
        api_key_id = request.auth.id if hasattr(
            request, 'auth') and isinstance(
            request.auth, ApiKey) else None
        ip_address = request.META['REMOTE_ADDR']

        remove_2fa_non_compliant_members.delay(
            self.id,
            actor_id=actor_id,
            actor_key_id=api_key_id,
            ip_address=ip_address
        )

    def get_url_viewname(self):
        return 'sentry-organization-issue-list'

    def get_url(self):
        return reverse(self.get_url_viewname(), args=[self.slug])
class OrganizationMember(Model):
    """
    Identifies relationships between organizations and users.

    Users listed as team members are considered to have access to all
    projects and could be thought of as team owners (though their access
    level may not be set to ownership).
    """
    __core__ = True

    organization = FlexibleForeignKey('sentry.Organization', related_name="member_set")

    # Exactly one of `user` / `email` is expected to be set (enforced in
    # save()): `email` while the invite is pending, `user` once accepted.
    user = FlexibleForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        blank=True,
        related_name="sentry_orgmember_set")
    email = models.EmailField(null=True, blank=True)
    role = models.CharField(
        choices=roles.get_choices(),
        max_length=32,
        default=roles.get_default().id,
    )
    flags = BitField(flags=(
        ('sso:linked', 'sso:linked'),
        ('sso:invalid', 'sso:invalid'),
    ), default=0)
    # Invite token; NULL once the member has accepted (see set_user()).
    token = models.CharField(max_length=64, null=True, blank=True, unique=True)
    date_added = models.DateTimeField(default=timezone.now)
    # NULL means the token never expires (legacy tokens; see token_expired).
    token_expires_at = models.DateTimeField(default=None, null=True)
    has_global_access = models.BooleanField(default=True)
    teams = models.ManyToManyField('sentry.Team', blank=True, through='sentry.OrganizationMemberTeam')

    # Deprecated -- no longer used
    type = BoundedPositiveIntegerField(default=50, blank=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_organizationmember'
        unique_together = (
            ('organization', 'user'),
            ('organization', 'email'),
        )

    __repr__ = sane_repr(
        'organization_id',
        'user_id',
        'role',
    )

    @transaction.atomic
    def save(self, *args, **kwargs):
        # A member must be tied to either a concrete user or a pending
        # invite email.
        assert self.user_id or self.email, \
            'Must set user or email'
        # Backfill an expiry for tokens created without one.
        if self.token and not self.token_expires_at:
            self.refresh_expires_at()
        super(OrganizationMember, self).save(*args, **kwargs)

    def set_user(self, user):
        # Invite accepted: bind the user and clear the pending-invite state.
        self.user = user
        self.email = None
        self.token = None
        self.token_expires_at = None

    def remove_user(self):
        # Detach the user but keep the membership as a pending invite,
        # snapshotting their email and issuing a fresh token.
        self.email = self.get_email()
        self.user = None
        self.token = self.generate_token()

    def regenerate_token(self):
        # Issue a new invite token and restart its validity window.
        self.token = self.generate_token()
        self.refresh_expires_at()

    def refresh_expires_at(self):
        # Push the token expiry INVITE_DAYS_VALID days into the future.
        now = timezone.now()
        self.token_expires_at = now + timedelta(days=INVITE_DAYS_VALID)

    @property
    def is_pending(self):
        # A member with no bound user is an outstanding invite.
        return self.user_id is None

    @property
    def token_expired(self):
        # Old tokens don't expire to preserve compatibility and not require
        # a backfill migration.
        if self.token_expires_at is None:
            return False
        if self.token_expires_at > timezone.now():
            return False
        return True

    @property
    def legacy_token(self):
        # Deterministic token for members created before `token` existed:
        # md5(org_id + email + SECRET_KEY). Kept for backwards compatibility
        # with previously-sent invite links.
        checksum = md5()
        checksum.update(six.text_type(self.organization_id).encode('utf-8'))
        checksum.update(self.get_email().encode('utf-8'))
        checksum.update(force_bytes(settings.SECRET_KEY))
        return checksum.hexdigest()

    def generate_token(self):
        # 64 hex chars of randomness (fits the CharField max_length=64).
        return uuid4().hex + uuid4().hex

    def get_invite_link(self):
        # Only pending members have an invite link; accepted members get None.
        if not self.is_pending:
            return None
        return absolute_uri(
            reverse('sentry-accept-invite', kwargs={
                'member_id': self.id,
                'token': self.token or self.legacy_token,
            }))

    def send_invite_email(self):
        """Email the pending member an invitation with their accept link."""
        from sentry.utils.email import MessageBuilder

        context = {
            'email': self.email,
            'organization': self.organization,
            'url': self.get_invite_link(),
        }

        msg = MessageBuilder(
            subject='Join %s in using Sentry' % self.organization.name,
            template='sentry/emails/member-invite.txt',
            html_template='sentry/emails/member-invite.html',
            type='organization.invite',
            context=context,
        )

        # Best-effort delivery: a failed invite email must not abort the
        # surrounding flow, so log and continue.
        try:
            msg.send_async([self.get_email()])
        except Exception as e:
            logger = get_logger(name='sentry.mail')
            logger.exception(e)

    def send_sso_link_email(self, actor, provider):
        """
        Email the member asking them to link their identity via the
        organization's SSO ``provider`` (triggered by ``actor``).
        """
        from sentry.utils.email import MessageBuilder

        link_args = {'organization_slug': self.organization.slug}

        context = {
            'organization': self.organization,
            'actor': actor,
            'provider': provider,
            'url': absolute_uri(reverse('sentry-auth-organization', kwargs=link_args)),
        }

        msg = MessageBuilder(
            subject='Action Required for %s' % (self.organization.name, ),
            template='sentry/emails/auth-link-identity.txt',
            html_template='sentry/emails/auth-link-identity.html',
            type='organization.auth_link',
            context=context,
        )
        msg.send_async([self.get_email()])

    def send_sso_unlink_email(self, actor, provider):
        """
        Notify the member that SSO was disabled for the organization, with a
        password-recovery (or set-password) link so they can regain access.
        """
        from sentry.utils.email import MessageBuilder
        from sentry.models import LostPasswordHash

        email = self.get_email()

        recover_uri = u'{path}?{query}'.format(
            path=reverse('sentry-account-recover'),
            query=urlencode({'email': email}),
        )

        # Nothing to send if this member isn't associated to a user
        if not self.user_id:
            return

        context = {
            'email': email,
            'recover_url': absolute_uri(recover_uri),
            'has_password': self.user.password,
            'organization': self.organization,
            'actor': actor,
            'provider': provider,
        }

        # SSO-only accounts have no password yet; give them a direct
        # set-password link instead of the recovery flow alone.
        if not self.user.password:
            password_hash = LostPasswordHash.for_user(self.user)
            context['set_password_url'] = password_hash.get_absolute_url(mode='set_password')

        msg = MessageBuilder(
            subject='Action Required for %s' % (self.organization.name, ),
            template='sentry/emails/auth-sso-disabled.txt',
            html_template='sentry/emails/auth-sso-disabled.html',
            type='organization.auth_sso_disabled',
            context=context,
        )
        msg.send_async([email])

    def get_display_name(self):
        # Prefer the bound user's display name; fall back to the invite email.
        if self.user_id:
            return self.user.get_display_name()
        return self.email

    def get_label(self):
        # User label, else invite email, else the raw member id.
        if self.user_id:
            return self.user.get_label()
        return self.email or self.id

    def get_email(self):
        # The user's account email wins over the invite-time email snapshot.
        if self.user_id and self.user.email:
            return self.user.email
        return self.email

    def get_avatar_type(self):
        if self.user_id:
            return self.user.get_avatar_type()
        return 'letter_avatar'

    def get_audit_log_data(self):
        """Snapshot of this membership for audit-log entries."""
        from sentry.models import Team
        # Resolve the member's active team ids/slugs in one query pair.
        teams = list(
            Team.objects.filter(id__in=OrganizationMemberTeam.objects.filter(
                organizationmember=self,
                is_active=True,
            ).values_list('team', flat=True)).values('id', 'slug'))
        return {
            'email': self.get_email(),
            'user': self.user_id,
            'teams': [t['id'] for t in teams],
            'teams_slugs': [t['slug'] for t in teams],
            'has_global_access': self.has_global_access,
            'role': self.role,
        }

    def get_teams(self):
        # Visible teams this member actively belongs to.
        from sentry.models import Team
        return Team.objects.filter(
            status=TeamStatus.VISIBLE,
            id__in=OrganizationMemberTeam.objects.filter(
                organizationmember=self,
                is_active=True,
            ).values('team'))

    def get_scopes(self):
        # API scopes granted by this member's role.
        return roles.get(self.role).scopes

    @classmethod
    def delete_expired(cls, threshold):
        """
        Delete un-accepted member invitations that expired
        ``threshold`` days ago.
        """
        # Only pending invites (no user) that carry an email are eligible.
        cls.objects.filter(
            token_expires_at__lt=threshold,
            user_id__exact=None,
        ).exclude(email__exact=None).delete()