def handle(self, **options):
    """Repair legacy data: backfill missing org slugs, attach orphaned
    projects to a team/organization, and create missing project keys.
    """
    from django.db.models import Q
    from sentry.constants import RESERVED_ORGANIZATION_SLUGS
    from sentry.models import Organization, Project, Team, ProjectKey
    from sentry.db.models import update
    from sentry.db.models.utils import slugify_instance
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    print("Correcting data on organizations")
    queryset = Organization.objects.filter(
        slug__isnull=True,
    )
    for org in RangeQuerySetWrapperWithProgressBar(queryset):
        slugify_instance(org, org.name, RESERVED_ORGANIZATION_SLUGS)
        org.save()

    # Create teams for any projects that are missing them
    print("Correcting data on projects")
    queryset = Project.objects.filter(
        Q(team__isnull=True) | Q(organization__isnull=True),
    ).select_related('owner')
    for project in RangeQuerySetWrapperWithProgressBar(queryset):
        if not project.team:
            organization = Organization(
                name=project.name,
                owner=project.owner,
            )
            slugify_instance(organization, organization.name, RESERVED_ORGANIZATION_SLUGS)
            organization.save()

            team = Team(
                name=project.name,
                owner=project.owner,
                # BUG FIX: was misspelled as `oprganization=`, so the new team
                # was never linked to the organization it was created for
                organization=organization,
            )
            slugify_instance(team, team.name, RESERVED_ORGANIZATION_SLUGS)
            team.save()
        else:
            # BUG FIX: the project already has a team (it matched the filter
            # because its organization is null); reuse that team instead of
            # leaving `team` unbound/stale from a previous loop iteration
            team = project.team
        update(project, organization=team.organization, team=team)

    # Create missing project keys
    print("Creating missing project keys")
    queryset = Team.objects.all()
    for team in RangeQuerySetWrapperWithProgressBar(queryset):
        for member in team.member_set.select_related('user'):
            for project in team.project_set.all():
                try:
                    # get_or_create is idempotent; the unused `created` flag
                    # from the original was dropped
                    ProjectKey.objects.get_or_create(
                        project=project,
                        user=member.user,
                    )
                except ProjectKey.MultipleObjectsReturned:
                    # duplicates already exist; nothing to repair here
                    pass
def get(self, request, *args, **kwargs):
    """Render the login page (GET), redirecting users who are already
    authenticated or installs running in single-organization mode.
    """
    next_uri = self.get_next_uri(request, *args, **kwargs)
    if request.user.is_authenticated():
        # if the user is a superuser, but not 'superuser authenticated'
        # we allow them to re-authenticate to gain superuser status
        if not request.user.is_superuser or is_active_superuser(request):
            return self.handle_authenticated(request, *args, **kwargs)
    # test cookie lets the POST handler verify cookies are enabled
    request.session.set_test_cookie()
    # we always reset the state on GET so you dont end up at an odd location
    auth.initiate_login(request, next_uri)
    # Single org mode -- send them to the org-specific handler
    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        next_uri = reverse('sentry-auth-organization', args=[org.slug])
        return HttpResponseRedirect(next_uri)
    session_expired = 'session_expired' in request.COOKIES
    if session_expired:
        messages.add_message(request, messages.WARNING, WARN_SESSION_EXPIRED)
    response = self.handle_basic_auth(request, *args, **kwargs)
    # clear the marker cookie so the warning only shows once
    if session_expired:
        response.delete_cookie('session_expired')
    return response
def handle(self, request):
    """Handle the login form: redirect authenticated users, route
    single-org installs and SSO requests, otherwise do basic auth.
    """
    if request.user.is_authenticated():
        return self.redirect_to_org(request)
    # Single org mode -- send them to the org-specific handler
    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        next_uri = reverse('sentry-auth-organization', args=[org.slug])
        return HttpResponseRedirect(next_uri)
    op = request.POST.get('op')
    if op == 'sso' and request.POST.get('organization'):
        auth_provider = self.get_auth_provider(request.POST['organization'])
        if auth_provider:
            next_uri = reverse('sentry-auth-organization',
                               args=[request.POST['organization']])
        else:
            # no SSO configured for that org; bounce back with an error
            next_uri = request.path
            messages.add_message(request, messages.ERROR, ERR_NO_SSO)
        return HttpResponseRedirect(next_uri)
    session_expired = 'session_expired' in request.COOKIES
    if session_expired:
        messages.add_message(request, messages.WARNING, WARN_SESSION_EXPIRED)
    response = self.handle_basic_auth(request)
    # clear the marker cookie so the warning only shows once
    if session_expired:
        response.delete_cookie('session_expired')
    return response
def register(request):
    """Self-serve registration view; in single-org mode the new user is
    also added to the default organization.
    """
    from django.conf import settings
    # registration must be enabled globally or granted for this session
    if not (features.has('auth:register') or request.session.get('can_register')):
        return HttpResponseRedirect(reverse('sentry'))
    form = RegistrationForm(request.POST or None,
                            captcha=bool(request.session.get('needs_captcha')))
    if form.is_valid():
        user = form.save()
        # TODO(dcramer): ideally this would be handled by a special view
        # specifically for organization registration
        if settings.SENTRY_SINGLE_ORGANIZATION:
            org = Organization.get_default()
            defaults = {
                'has_global_access': True,
                'type': OrganizationMemberType.MEMBER,
            }
            try:
                auth_provider = AuthProvider.objects.get(
                    organization=org.id,
                )
            except AuthProvider.DoesNotExist:
                pass
            else:
                # an SSO provider dictates the defaults for new members
                defaults.update({
                    'has_global_access': auth_provider.default_global_access,
                    'type': auth_provider.default_role,
                })
            org.member_set.create(
                user=user,
                **defaults
            )
        # can_register should only allow a single registration
        request.session.pop('can_register', None)
        # HACK: grab whatever the first backend is and assume it works
        user.backend = settings.AUTHENTICATION_BACKENDS[0]
        login_user(request, user)
        request.session.pop('needs_captcha', None)
        return login_redirect(request)
    elif request.POST and not request.session.get('needs_captcha'):
        # first failed POST: require a captcha on the next attempt and
        # re-render without showing the captcha's own validation error
        request.session['needs_captcha'] = 1
        form = RegistrationForm(request.POST or None, captcha=True)
        form.errors.pop('captcha', None)
    return render_to_response('sentry/register.html', {
        'form': form,
    }, request)
def createuser(email, password, superuser, no_password, no_input):
    "Create a new user."
    # NOTE: the one-line docstring above is the click help text -- leave as-is.
    # Interactive mode: prompt for anything not supplied via options.
    if not no_input:
        if not email:
            email = _get_email()
        if not (password or no_password):
            password = _get_password()
        if superuser is None:
            superuser = _get_superuser()
    if superuser is None:
        superuser = False
    if not email:
        raise click.ClickException('Invalid or missing email address.')
    # TODO(mattrobenolt): Accept password over stdin?
    if not no_password and not password:
        raise click.ClickException('No password set and --no-password not passed.')
    from sentry import roles
    from sentry.models import User
    from django.conf import settings
    # email doubles as the username
    user = User(
        email=email,
        username=email,
        is_superuser=superuser,
        is_staff=superuser,
        is_active=True,
    )
    if password:
        user.set_password(password)
    user.save()
    click.echo('User created: %s' % (email,))
    # TODO(dcramer): kill this when we improve flows
    if settings.SENTRY_SINGLE_ORGANIZATION:
        from sentry.models import Organization, OrganizationMember
        org = Organization.get_default()
        # superusers get the top role; everyone else the org default
        if superuser:
            role = roles.get_top_dog().id
        else:
            role = org.default_role
        OrganizationMember.objects.create(
            organization=org,
            user=user,
            role=role,
        )
        click.echo('Added to organization: %s' % (org.slug,))
def handle(self, **options):
    """Create a user from command-line options, prompting interactively
    unless --noinput was given; in single-org mode also add them as an
    organization owner.
    """
    email = options['email']
    is_superuser = options['is_superuser']
    password = options['password']
    if not options['noinput']:
        try:
            if not email:
                email = self.get_email()
            if not (password or options['nopassword']):
                password = self.get_password()
            if is_superuser is None:
                is_superuser = self.get_superuser()
        except KeyboardInterrupt:
            # Ctrl-C during a prompt aborts cleanly instead of tracebacking
            self.stderr.write("\nOperation cancelled.")
            sys.exit(1)
    if not email:
        raise CommandError('Invalid or missing email address')
    if not options['nopassword'] and not password:
        raise CommandError('No password set and --no-password not passed')
    # email doubles as the username
    user = User(
        email=email,
        username=email,
        is_superuser=is_superuser,
        is_staff=is_superuser,
        is_active=True,
    )
    if password:
        user.set_password(password)
    user.save()
    self.stdout.write('User created: %s' % (email,))
    # TODO(dcramer): kill this when we improve flows
    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        OrganizationMember.objects.create(
            organization=org,
            user=user,
            type=OrganizationMemberType.OWNER,
            has_global_access=user.is_superuser,
        )
        self.stdout.write('Added to organization: %s' % (org.slug,))
def handle(self, request):
    """Dispatch the login request: single-org installs go straight to the
    org handler, SSO submissions resolve their provider, everything else
    falls through to basic auth.
    """
    # Single-org installs always use the org-specific auth flow.
    if settings.SENTRY_SINGLE_ORGANIZATION:
        default_org = Organization.get_default()
        return HttpResponseRedirect(
            reverse('sentry-auth-organization', args=[default_org.slug])
        )

    org_slug = request.POST.get('organization')
    if request.POST.get('op') == 'sso' and org_slug:
        if self.get_auth_provider(org_slug):
            target = reverse('sentry-auth-organization', args=[org_slug])
        else:
            # no SSO configured for that org; bounce back with an error
            target = request.path
            messages.add_message(request, messages.ERROR, ERR_NO_SSO)
        return HttpResponseRedirect(target)

    return self.handle_basic_auth(request)
def handle(self, request):
    """Handle login: honor a safe ?next= redirect for authenticated users,
    route single-org and SSO flows, otherwise perform basic auth.
    """
    next_uri = request.GET.get(REDIRECT_FIELD_NAME, None)
    if request.user.is_authenticated():
        # only follow next_uri if it passes the open-redirect check
        if auth.is_valid_redirect(next_uri):
            return self.redirect(next_uri)
        return self.redirect_to_org(request)
    # test cookie lets the POST handler verify cookies are enabled
    request.session.set_test_cookie()
    if next_uri:
        auth.initiate_login(request, next_uri)
    # Single org mode -- send them to the org-specific handler
    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        next_uri = reverse("sentry-auth-organization", args=[org.slug])
        return HttpResponseRedirect(next_uri)
    op = request.POST.get("op")
    if op == "sso" and request.POST.get("organization"):
        auth_provider = self.get_auth_provider(request.POST["organization"])
        if auth_provider:
            next_uri = reverse("sentry-auth-organization",
                               args=[request.POST["organization"]])
        else:
            # no SSO configured for that org; bounce back with an error
            next_uri = request.path
            messages.add_message(request, messages.ERROR, ERR_NO_SSO)
        return HttpResponseRedirect(next_uri)
    session_expired = "session_expired" in request.COOKIES
    if session_expired:
        messages.add_message(request, messages.WARNING, WARN_SESSION_EXPIRED)
    response = self.handle_basic_auth(request)
    # clear the marker cookie so the warning only shows once
    if session_expired:
        response.delete_cookie("session_expired")
    return response
def org_delete_confirm(request):
    """Preview the organization-deletion confirmation email using the
    default organization and the requesting user as the actor.
    """
    from sentry.models import AuditLogEntry

    organization = Organization.get_default()
    log_entry = AuditLogEntry(
        organization=organization,
        actor=request.user,
        ip_address=request.META['REMOTE_ADDR'],
    )
    restore_url = absolute_uri(reverse(
        'sentry-restore-organization',
        args=[organization.slug],
    ))
    preview = MailPreview(
        html_template='sentry/emails/org_delete_confirm.html',
        text_template='sentry/emails/org_delete_confirm.txt',
        context={
            'organization': organization,
            'audit_log_entry': log_entry,
            'eta': timezone.now() + timedelta(days=1),
            'url': restore_url,
        },
    )
    return preview.render(request)
def digest(request): random = get_random(request) # TODO: Refactor all of these into something more manageable. org = Organization(id=1, slug="example", name="Example Organization") project = Project(id=1, slug="example", name="Example Project", organization=org) rules = { i: Rule(id=i, project=project, label=f"Rule #{i}") for i in range(1, random.randint(2, 4)) } state = { "project": project, "groups": {}, "rules": rules, "event_counts": {}, "user_counts": {}, } records = [] group_generator = make_group_generator(random, project) for i in range(random.randint(1, 30)): group = next(group_generator) state["groups"][group.id] = group offset = timedelta(seconds=0) for i in range(random.randint(1, 10)): offset += timedelta(seconds=random.random() * 120) data = dict(load_data("python")) data["message"] = group.message data.pop("logentry", None) event_manager = EventManager(data) event_manager.normalize() data = event_manager.get_data() data["timestamp"] = random.randint( to_timestamp(group.first_seen), to_timestamp(group.last_seen) ) event = eventstore.create_event( event_id=uuid.uuid4().hex, group_id=group.id, project_id=project.id, data=data.data ) records.append( Record( event.event_id, Notification( event, random.sample( list(state["rules"].keys()), random.randint(1, len(state["rules"])) ), ), to_timestamp(event.datetime), ) ) state["event_counts"][group.id] = random.randint(10, 1e4) state["user_counts"][group.id] = random.randint(10, 1e4) digest = build_digest(project, records, state)[0] start, end, counts = get_digest_metadata(digest) context = { "project": project, "counts": counts, "digest": digest, "start": start, "end": end, "referrer": "digest_email", } add_unsubscribe_link(context) return MailPreview( html_template="sentry/emails/digests/body.html", text_template="sentry/emails/digests/body.txt", context=context, ).render(request)
def test_reverse_resolve():
    """An unknown id reverse-resolves to None; a seeded id maps to its string."""
    org = Organization()
    missing = INDEXER.reverse_resolve(org, UseCase.METRIC, 666)
    present = INDEXER.reverse_resolve(org, UseCase.METRIC, 11)
    assert missing is None
    assert present == "user"
def alert(request):
    """Preview the error alert email using a randomized example event.

    BUG FIX: the context dict previously contained the key 'tags' twice;
    the first entry ('tags': event.get_tags()) was dead code since the
    later literal list silently won. Only the surviving entry is kept,
    so rendered output is unchanged.
    """
    platform = request.GET.get('platform', 'python')
    org = Organization(
        id=1,
        slug='example',
        name='Example',
    )
    project = Project(
        id=1,
        slug='example',
        name='Example',
        organization=org,
    )
    random = get_random(request)
    group = next(make_group_generator(random, project))
    event = Event(
        id=1,
        event_id='44f1419e73884cd2b45c79918f4b6dc4',
        project=project,
        group=group,
        message=group.message,
        data=load_data(platform),
        # timestamp somewhere within the group's active window
        datetime=to_datetime(
            random.randint(
                to_timestamp(group.first_seen),
                to_timestamp(group.last_seen),
            ),
        ),
    )
    rule = Rule(label="An example rule")

    # collect the HTML rendering of each interface that produces output
    interface_list = []
    for interface in six.itervalues(event.interfaces):
        body = interface.to_email_html(event)
        if not body:
            continue
        interface_list.append((interface.get_title(), mark_safe(body)))

    return MailPreview(
        html_template='sentry/emails/error.html',
        text_template='sentry/emails/error.txt',
        context={
            'rule': rule,
            'group': group,
            'event': event,
            'link': 'http://example.com/link',
            'interfaces': interface_list,
            'project_label': project.name,
            'tags': [('logger', 'javascript'), ('environment', 'prod'), ('level', 'error'), ('device', 'Other')],
            'commits': [{
                # TODO(dcramer): change to use serializer
                "repository": {
                    "status": "active",
                    "name": "Example Repo",
                    "url": "https://github.com/example/example",
                    "dateCreated": "2018-02-28T23:39:22.402Z",
                    "provider": {
                        "id": "github",
                        "name": "GitHub"
                    },
                    "id": "1"
                },
                "score": 2,
                "subject": "feat: Do something to raven/base.py",
                "message": "feat: Do something to raven/base.py\naptent vivamus vehicula tempus volutpat hac tortor",
                "id": "1b17483ffc4a10609e7921ee21a8567bfe0ed006",
                "shortId": "1b17483",
                "author": {
                    "username": "******",
                    "isManaged": False,
                    "lastActive": "2018-03-01T18:25:28.149Z",
                    "id": "1",
                    "isActive": True,
                    "has2fa": False,
                    "name": "*****@*****.**",
                    "avatarUrl": "https://secure.gravatar.com/avatar/51567a4f786cd8a2c41c513b592de9f9?s=32&d=mm",
                    "dateJoined": "2018-02-27T22:04:32.847Z",
                    "emails": [{
                        "is_verified": False,
                        "id": "1",
                        "email": "*****@*****.**"
                    }],
                    "avatar": {
                        "avatarUuid": None,
                        "avatarType": "letter_avatar"
                    },
                    "lastLogin": "******",
                    "email": "*****@*****.**"
                }
            }],
        },
    ).render(request)
def digest(request): random = get_random(request) # TODO: Refactor all of these into something more manageable. org = Organization( id=1, slug='example', name='Example Organization', ) team = Team( id=1, slug='example', name='Example Team', organization=org, ) project = Project( id=1, slug='example', name='Example Project', team=team, organization=org, ) rules = { i: Rule( id=i, project=project, label="Rule #%s" % (i, ), ) for i in xrange(1, random.randint(2, 4)) } state = { 'project': project, 'groups': {}, 'rules': rules, 'event_counts': {}, 'user_counts': {}, } records = [] event_sequence = itertools.count(1) group_generator = make_group_generator(random, project) for i in xrange(random.randint(1, 30)): group = next(group_generator) state['groups'][group.id] = group offset = timedelta(seconds=0) for i in xrange(random.randint(1, 10)): offset += timedelta(seconds=random.random() * 120) event = Event(id=next(event_sequence), event_id=uuid.uuid4().hex, project=project, group=group, message=group.message, data=load_data('python'), datetime=to_datetime( random.randint( to_timestamp(group.first_seen), to_timestamp(group.last_seen), ), )) records.append( Record( event.event_id, Notification( event, random.sample(state['rules'], random.randint(1, len(state['rules']))), ), to_timestamp(event.datetime), )) state['event_counts'][group.id] = random.randint(10, 1e4) state['user_counts'][group.id] = random.randint(10, 1e4) digest = build_digest(project, records, state) start, end, counts = get_digest_metadata(digest) return MailPreview( html_template='sentry/emails/digests/body.html', text_template='sentry/emails/digests/body.txt', context={ 'project': project, 'counts': counts, 'digest': digest, 'start': start, 'end': end, }, ).render(request)
def test_reverse_resolve():
    """Unknown ids resolve to None; id 11 maps back to "user"."""
    org_id = Organization().id
    assert INDEXER.reverse_resolve(org_id, 666) is None
    assert INDEXER.reverse_resolve(org_id, 11) == "user"
def digest(request):
    """Preview the digest email, synthesizing random groups and events.

    The random sequence is seeded from ?seed= (or the current time) so a
    preview can be reproduced.
    """
    seed = request.GET.get('seed', str(time.time()))
    # BUG FIX: the seed was never passed to the log call, so the '%s'
    # placeholder was emitted literally; supply it as a lazy %-arg.
    logger.debug('Using random seed value: %s', seed)
    random = Random(seed)
    now = datetime.utcnow().replace(tzinfo=pytz.utc)
    # TODO: Refactor all of these into something more manageable.
    org = Organization(
        id=1,
        slug='example',
        name='Example Organization',
    )
    team = Team(
        id=1,
        slug='example',
        name='Example Team',
        organization=org,
    )
    project = Project(
        id=1,
        slug='example',
        name='Example Project',
        team=team,
        organization=org,
    )
    rules = {
        i: Rule(
            id=i,
            project=project,
            label="Rule #%s" % (i, ),
        )
        for i in xrange(1, random.randint(2, 4))
    }
    state = {
        'project': project,
        'groups': {},
        'rules': rules,
        'event_counts': {},
        'user_counts': {},
    }
    records = []
    group_sequence = itertools.count(1)
    event_sequence = itertools.count(1)
    for i in xrange(random.randint(1, 30)):
        group_id = next(group_sequence)
        # fabricate a plausible "module in function" culprit string
        culprit = '{module} in {function}'.format(
            module='.'.join(
                ''.join(random.sample(
                    WORDS, random.randint(1, int(random.paretovariate(2.2)))))
                for word in xrange(1, 4)),
            function=random.choice(WORDS))
        group = state['groups'][group_id] = Group(
            id=group_id,
            project=project,
            message=words(int(random.weibullvariate(8, 4)), common=False),
            culprit=culprit,
            level=random.choice(LOG_LEVELS.keys()),
        )
        offset = timedelta(seconds=0)
        # several events per group, spaced up to 2 minutes apart
        for i in xrange(random.randint(1, 10)):
            offset += timedelta(seconds=random.random() * 120)
            event = Event(
                id=next(event_sequence),
                event_id=uuid.uuid4().hex,
                project=project,
                group=group,
                message=group.message,
                data=load_data('python'),
                datetime=now - offset,
            )
            records.append(
                Record(
                    event.event_id,
                    Notification(
                        event,
                        random.sample(state['rules'], random.randint(1, len(state['rules']))),
                    ),
                    to_timestamp(event.datetime),
                ))
        # per-group totals (keyed by group id)
        state['event_counts'][group_id] = random.randint(10, 1e4)
        state['user_counts'][group_id] = random.randint(10, 1e4)
    digest = build_digest(project, records, state)
    start, end, counts = get_digest_metadata(digest)
    return MailPreview(
        html_template='sentry/emails/digests/body.html',
        text_template='sentry/emails/digests/body.txt',
        context={
            'project': project,
            'counts': counts,
            'digest': digest,
            'start': start,
            'end': end,
        },
    ).render()
def get(self, request):
    """Preview the release/deploy activity email with canned fixture data:
    one release deployed to three projects across two repositories.
    """
    org = Organization(id=1, slug="organization", name="My Company")
    projects = [
        Project(id=1, organization=org, slug="project", name="My Project"),
        Project(id=2, organization=org, slug="another-project", name="Another Project"),
        Project(id=3, organization=org, slug="yet-another-project", name="Yet Another Project"),
    ]
    release = Release(
        organization_id=org.id,
        version="6c998f755f304593a4713abd123eaf8833a2de5e",
        date_added=datetime.datetime(2016, 10, 12, 15, 39, tzinfo=pytz.utc),
    )
    deploy = Deploy(
        release=release,
        organization_id=org.id,
        environment_id=1,
        date_finished=datetime.datetime(2016, 10, 12, 15, 39, tzinfo=pytz.utc),
    )
    # one release-detail link per project, in the same order as `projects`
    release_links = [
        absolute_uri("/organizations/{}/releases/{}/?project={}".format(
            org.slug, release.version, p.id))
        for p in projects
    ]
    # each entry: repo name plus (Commit, resolved-User-or-None) pairs
    repos = [
        {
            "name": "getsentry/getsentry",
            "commits": [
                (
                    Commit(
                        key="48b86fcd677da3dba5679d7a738240ce6fb74b20",
                        date_added=datetime.datetime(2016, 10, 11, 15, 39, tzinfo=pytz.utc),
                    ),
                    None,
                ),
                (
                    Commit(
                        key="a53a2756bb8d111b43196210b34df90b87ed336b",
                        message="Fix billing",
                        author=CommitAuthor(name="David Cramer", email="*****@*****.**"),
                        date_added=datetime.datetime(2016, 10, 11, 16, 45, tzinfo=pytz.utc),
                    ),
                    User(email="*****@*****.**", name="David Cramer"),
                ),
            ],
        },
        {
            "name": "getsentry/sentry",
            "commits": [
                (
                    Commit(
                        key="3c8eb3b4af6ee2a29c68daa188fc730c8e4b39fd",
                        date_added=datetime.datetime(2016, 10, 10, 15, 39, tzinfo=pytz.utc),
                    ),
                    None,
                ),
                (
                    Commit(
                        key="373562702009df1692da6eb80a933139f29e094b",
                        message="Fix padding",
                        author=CommitAuthor(name="Chris Jennings", email="*****@*****.**"),
                        date_added=datetime.datetime(2016, 10, 10, 16, 39, tzinfo=pytz.utc),
                    ),
                    None,
                ),
                (
                    Commit(
                        key="631cd9096bd9811a046a472bb0aa8b573e86e1f1",
                        message="Update README.rst",
                        author=CommitAuthor(name="David Cramer", email="*****@*****.**"),
                        date_added=datetime.datetime(2016, 10, 11, 10, 39, tzinfo=pytz.utc),
                    ),
                    User(email="*****@*****.**", name="David Cramer"),
                ),
            ],
        },
    ]
    return MailPreview(
        html_template="sentry/emails/activity/release.html",
        text_template="sentry/emails/activity/release.txt",
        context={
            "release": release,
            # third element is the per-project new-group count shown in the email
            "projects": zip(projects, release_links, [6, 1, 0]),
            "repos": repos,
            "reason": GroupSubscriptionReason.descriptions[
                GroupSubscriptionReason.committed],
            "project_count": len(projects),
            "commit_count": 4,
            "author_count": 1,
            "file_count": 5,
            "environment": "production",
            "deploy": deploy,
            "setup_repo_link": absolute_uri("/organizations/{}/repos/".format(org.slug)),
        },
    ).render(request)
def get_context(request):
    """Build the template context for this email preview: a throwaway
    organization, the requesting user as actor, and a dummy provider.
    """
    return {
        "organization": Organization(name="My Company"),
        "actor": request.user,
        "provider": DummyProvider("dummy"),
    }
# Bootstrap the Sentry environment from sentry.utils.runner import configure configure("/home/sentry/sentry.conf.py") # Do something crazy from sentry.models import Team, Project, ProjectKey, User, Organization user = User.objects.get(pk=1) organization = Organization() organization.name = 'AgoraVoting' organization.owner = user organization.save() team = Team() team.name = 'AgoraVoting' team.organization = organization team.save() project = Project() project.team = team project.name = 'AuthApi' project.organization = organization project.save() key = ProjectKey.objects.filter(project=project)[0] dsn = key.get_dsn() # writting the sentry configuration to deploy.conf authapi_conf = ''' # sentry
def createuser(email, password, superuser, no_password, no_input, force_update):
    "Create a new user."
    # NOTE: the one-line docstring above is the click help text -- leave as-is.
    # Interactive mode: prompt for anything not supplied via options.
    if not no_input:
        if not email:
            email = _get_email()
        if not (password or no_password):
            password = _get_password()
        if superuser is None:
            superuser = _get_superuser()
    if superuser is None:
        superuser = False
    if not email:
        raise click.ClickException("Invalid or missing email address.")
    # TODO(mattrobenolt): Accept password over stdin?
    if not no_password and not password:
        raise click.ClickException(
            "No password set and --no-password not passed.")
    from sentry import roles
    from sentry.models import User
    from django.conf import settings
    # email doubles as the username
    fields = dict(
        email=email,
        username=email,
        is_superuser=superuser,
        is_staff=superuser,
        is_active=True)
    verb = None
    try:
        user = User.objects.get(username=email)
    except User.DoesNotExist:
        user = None
    if user is not None:
        # existing user: only overwrite when explicitly forced
        if force_update:
            user.update(**fields)
            verb = "updated"
        else:
            click.echo(f"User: {email} exists, use --force-update to force")
            sys.exit(3)
    else:
        user = User.objects.create(**fields)
        verb = "created"
    # TODO(dcramer): kill this when we improve flows
    if settings.SENTRY_SINGLE_ORGANIZATION:
        from sentry.models import Organization, OrganizationMember, OrganizationMemberTeam, Team
        org = Organization.get_default()
        # superusers get the top role; everyone else the org default
        if superuser:
            role = roles.get_top_dog().id
        else:
            role = org.default_role
        member = OrganizationMember.objects.create(
            organization=org, user=user, role=role)
        # if we've only got a single team let's go ahead and give
        # access to that team as its likely the desired outcome
        teams = list(Team.objects.filter(organization=org)[0:2])
        if len(teams) == 1:
            OrganizationMemberTeam.objects.create(
                team=teams[0], organizationmember=member)
        click.echo(f"Added to organization: {org.slug}")
    if password:
        user.set_password(password)
        user.save()
    click.echo(f"User {verb}: {email}")
os.environ['SENTRY_CONF'] = "/home/sentry/sentry.conf.py" # Bootstrap the Sentry environment from sentry.runner import configure configure() # Create the org, team and project if needed from sentry.models import Team, Project, ProjectKey, User, Organization, OrganizationMember user = User.objects.get(pk=1) name = 'AgoraVoting' name2 = 'AuthApi' if Organization.objects.filter(name=name).count() == 0: organization = Organization() organization.name = name organization.save() om = OrganizationMember() om.organization = organization om.role = 'owner' om.user = user om.save() team = Team() team.name = name team.organization = organization team.save() project = Project()
def _prepare_query_params(query_params):
    """Translate a SnubaQueryParams object into the kwargs dict for a Snuba
    request, returning (kwargs, forward, reverse) where forward/reverse map
    model ids to snuba keys and back.
    """
    # convert to naive UTC datetimes, as Snuba only deals in UTC
    # and this avoids offset-naive and offset-aware issues
    start = naiveify_datetime(query_params.start)
    end = naiveify_datetime(query_params.end)

    with timer("get_snuba_map"):
        forward, reverse = get_snuba_translators(
            query_params.filter_keys, is_grouprelease=query_params.is_grouprelease
        )

    if "project_id" in query_params.filter_keys:
        # If we are given a set of project ids, use those directly.
        project_ids = list(set(query_params.filter_keys["project_id"]))
    elif query_params.filter_keys:
        # Otherwise infer the project_ids from any related models
        with timer("get_related_project_ids"):
            ids = [
                get_related_project_ids(k, query_params.filter_keys[k])
                for k in query_params.filter_keys
            ]
            project_ids = list(set.union(*map(set, ids)))
    else:
        project_ids = []

    # deepcopy so forward() cannot mutate the caller's filter_keys;
    # note query_params.conditions IS mutated in place below
    for col, keys in six.iteritems(forward(deepcopy(query_params.filter_keys))):
        if keys:
            if len(keys) == 1 and None in keys:
                query_params.conditions.append((col, "IS NULL", None))
            else:
                query_params.conditions.append((col, "IN", keys))

    if not project_ids:
        raise UnqualifiedQueryError(
            "No project_id filter, or none could be inferred from other filters."
        )

    # any project will do, as they should all be from the same organization
    project = Project.objects.get(pk=project_ids[0])
    retention = quotas.get_event_retention(organization=Organization(project.organization_id))
    if retention:
        # clamp the window to the retention horizon
        start = max(start, datetime.utcnow() - timedelta(days=retention))
        if start > end:
            raise QueryOutsideRetentionError

    # if `shrink_time_window` pushed `start` after `end` it means the user queried
    # a Group for T1 to T2 when the group was only active for T3 to T4, so the query
    # wouldn't return any results anyway
    new_start = shrink_time_window(query_params.filter_keys.get("issue"), start)
    # TODO (alexh) this is a quick emergency fix for an occasion where a search
    # results in only 1 django candidate, which is then passed to snuba to
    # check and we raised because of it. Remove this once we figure out why the
    # candidate was returned from django at all if it existed only outside the
    # time range of the query
    if new_start <= end:
        start = new_start

    if start > end:
        raise QueryOutsideGroupActivityError

    query_params.kwargs.update(
        {
            "from_date": start.isoformat(),
            "to_date": end.isoformat(),
            "groupby": query_params.groupby,
            "conditions": query_params.conditions,
            "aggregations": query_params.aggregations,
            "project": project_ids,
            "granularity": query_params.rollup,  # TODO name these things the same
        }
    )
    # drop None-valued kwargs so Snuba applies its own defaults
    kwargs = {k: v for k, v in six.iteritems(query_params.kwargs) if v is not None}
    kwargs.update(OVERRIDE_OPTIONS)
    return kwargs, forward, reverse
def test_name_has_changes_before_save(self):
    """Field-change tracking sees a rename made after update_tracked_data()
    and still reports the original value via old_value().
    """
    inst = Organization(id=1, name="bar")
    update_tracked_data(inst)
    inst.name = "baz"
    self.assertTrue(has_changed(inst, "name"))
    # FIX: assertEquals is a long-deprecated alias of assertEqual
    self.assertEqual(old_value(inst, "name"), "bar")
def raw_query(start, end, groupby=None, conditions=None, filter_keys=None,
              aggregations=None, rollup=None, arrayjoin=None, limit=None,
              offset=None, orderby=None, having=None, referrer=None,
              is_grouprelease=False, selected_columns=None, totals=None,
              limitby=None, turbo=False):
    """
    Sends a query to snuba.

    `conditions`: A list of (column, operator, literal) conditions to be passed
    to the query. Conditions that we know will not have to be translated should
    be passed this way (eg tag[foo] = bar).

    `filter_keys`: A dictionary of {col: [key, ...]} that will be converted
    into "col IN (key, ...)" conditions. These are used to restrict the query to
    known sets of project/issue/environment/release etc. Appropriate
    translations (eg. from environment model ID to environment name) are
    performed on the query, and the inverse translation performed on the
    result. The project_id(s) to restrict the query to will also be
    automatically inferred from these keys.

    `aggregations` a list of (aggregation_function, column, alias) tuples to be
    passed to the query.

    Raises UnqualifiedQueryError when no project can be determined,
    QueryOutsideRetentionError / QueryOutsideGroupActivityError when the
    time window collapses, and SnubaError subclasses for transport or
    server-reported failures.
    """
    # convert to naive UTC datetimes, as Snuba only deals in UTC
    # and this avoids offset-naive and offset-aware issues
    start = naiveify_datetime(start)
    end = naiveify_datetime(end)

    groupby = groupby or []
    # NOTE(review): caller-supplied `conditions` lists are appended to in
    # place below when truthy -- confirm callers do not reuse them
    conditions = conditions or []
    having = having or []
    aggregations = aggregations or []
    filter_keys = filter_keys or {}
    selected_columns = selected_columns or []

    with timer('get_snuba_map'):
        forward, reverse = get_snuba_translators(
            filter_keys, is_grouprelease=is_grouprelease)

    if 'project_id' in filter_keys:
        # If we are given a set of project ids, use those directly.
        project_ids = list(set(filter_keys['project_id']))
    elif filter_keys:
        # Otherwise infer the project_ids from any related models
        with timer('get_related_project_ids'):
            ids = [
                get_related_project_ids(k, filter_keys[k])
                for k in filter_keys
            ]
            project_ids = list(set.union(*map(set, ids)))
    else:
        project_ids = []

    # translate filter_keys into IN / IS NULL conditions
    for col, keys in six.iteritems(forward(filter_keys.copy())):
        if keys:
            if len(keys) == 1 and None in keys:
                conditions.append((col, 'IS NULL', None))
            else:
                conditions.append((col, 'IN', keys))

    if not project_ids:
        raise UnqualifiedQueryError(
            "No project_id filter, or none could be inferred from other filters."
        )

    # any project will do, as they should all be from the same organization
    project = Project.objects.get(pk=project_ids[0])
    retention = quotas.get_event_retention(
        organization=Organization(project.organization_id))
    if retention:
        # clamp the window to the retention horizon
        start = max(start, datetime.utcnow() - timedelta(days=retention))
        if start > end:
            raise QueryOutsideRetentionError

    start = shrink_time_window(filter_keys.get('issue'), start)

    # if `shrink_time_window` pushed `start` after `end` it means the user queried
    # a Group for T1 to T2 when the group was only active for T3 to T4, so the query
    # wouldn't return any results anyway
    if start > end:
        raise QueryOutsideGroupActivityError

    # drop None-valued entries so Snuba applies its own defaults
    request = {
        k: v
        for k, v in six.iteritems({
            'from_date': start.isoformat(),
            'to_date': end.isoformat(),
            'conditions': conditions,
            'having': having,
            'groupby': groupby,
            'totals': totals,
            'project': project_ids,
            'aggregations': aggregations,
            'granularity': rollup,
            'arrayjoin': arrayjoin,
            'limit': limit,
            'offset': offset,
            'limitby': limitby,
            'orderby': orderby,
            'selected_columns': selected_columns,
            'turbo': turbo
        }) if v is not None
    }
    request.update(OVERRIDE_OPTIONS)

    headers = {}
    if referrer:
        headers['referer'] = referrer

    try:
        with timer('snuba_query'):
            response = _snuba_pool.urlopen(
                'POST', '/query', body=json.dumps(request), headers=headers)
    except urllib3.exceptions.HTTPError as err:
        raise SnubaError(err)

    try:
        body = json.loads(response.data)
    except ValueError:
        raise UnexpectedResponseError(
            u"Could not decode JSON response: {}".format(response.data))

    if response.status != 200:
        # map the server-reported error type onto our exception hierarchy
        if body.get('error'):
            error = body['error']
            if response.status == 429:
                raise RateLimitExceeded(error['message'])
            elif error['type'] == 'schema':
                raise SchemaValidationError(error['message'])
            elif error['type'] == 'clickhouse':
                raise clickhouse_error_codes_map.get(
                    error['code'],
                    QueryExecutionError,
                )(error['message'])
            else:
                raise SnubaError(error['message'])
        else:
            raise SnubaError(u'HTTP {}'.format(response.status))

    # Forward and reverse translation maps from model ids to snuba keys, per column
    body['data'] = [reverse(d) for d in body['data']]
    return body
def raw_query(start, end, groupby=None, conditions=None, filter_keys=None,
              aggregations=None, rollup=None, arrayjoin=None, limit=None,
              orderby=None, having=None, referrer=None, is_grouprelease=False,
              selected_columns=None, totals=None):
    """
    Sends a query to snuba.

    `conditions`: A list of (column, operator, literal) conditions to be passed
    to the query. Conditions that we know will not have to be translated should
    be passed this way (eg tag[foo] = bar).

    `filter_keys`: A dictionary of {col: [key, ...]} that will be converted
    into "col IN (key, ...)" conditions. These are used to restrict the query to
    known sets of project/issue/environment/release etc. Appropriate
    translations (eg. from environment model ID to environment name) are
    performed on the query, and the inverse translation performed on the
    result. The project_id(s) to restrict the query to will also be
    automatically inferred from these keys.

    `aggregations` a list of (aggregation_function, column, alias) tuples to be
    passed to the query.

    Returns the decoded snuba response body (a dict), with each row in
    `body['data']` passed through the reverse key translation.

    Raises SnubaError on transport failures, undecodable responses, or
    non-200 responses from snuba.
    """
    # convert to naive UTC datetimes, as Snuba only deals in UTC
    # and this avoids offset-naive and offset-aware issues
    start = start if not start.tzinfo else start.astimezone(pytz.utc).replace(
        tzinfo=None)
    end = end if not end.tzinfo else end.astimezone(pytz.utc).replace(
        tzinfo=None)

    # Normalize all optional collection arguments to empty collections.
    groupby = groupby or []
    conditions = conditions or []
    having = having or []
    aggregations = aggregations or []
    filter_keys = filter_keys or {}
    selected_columns = selected_columns or []

    with timer('get_snuba_map'):
        forward, reverse = get_snuba_translators(
            filter_keys, is_grouprelease=is_grouprelease)

    if 'project_id' in filter_keys:
        # If we are given a set of project ids, use those directly.
        project_ids = filter_keys['project_id']
    elif filter_keys:
        # Otherwise infer the project_ids from any related models
        with timer('get_related_project_ids'):
            ids = [
                get_related_project_ids(k, filter_keys[k]) for k in filter_keys
            ]
            project_ids = list(set.union(*map(set, ids)))
    else:
        project_ids = []

    # Convert the (translated) filter_keys into explicit IN / IS NULL
    # conditions on the query.
    for col, keys in six.iteritems(forward(filter_keys.copy())):
        if keys:
            if len(keys) == 1 and keys[0] is None:
                conditions.append((col, 'IS NULL', None))
            else:
                conditions.append((col, 'IN', keys))

    if not project_ids:
        raise SnubaError(
            "No project_id filter, or none could be inferred from other filters."
        )

    # any project will do, as they should all be from the same organization
    project = Project.objects.get(pk=project_ids[0])
    retention = quotas.get_event_retention(
        organization=Organization(project.organization_id))
    if retention:
        # Clamp the query window to the organization's retention period.
        start = max(start, datetime.utcnow() - timedelta(days=retention))
        if start > end:
            raise EntireQueryOutsideRetentionError

    # If the grouping, aggregation, or any of the conditions reference `issue`
    # we need to fetch the issue definitions (issue -> fingerprint hashes)
    aggregate_cols = [a[1] for a in aggregations]
    condition_cols = [c[0] for c in flat_conditions(conditions)]
    all_cols = groupby + aggregate_cols + condition_cols + selected_columns
    get_issues = 'issue' in all_cols

    with timer('get_project_issues'):
        issues = get_project_issues(
            project_ids, filter_keys.get('issue')) if get_issues else None

    # Build the request payload, dropping any None-valued keys so snuba
    # applies its own defaults.
    request = {
        k: v
        for k, v in six.iteritems({
            'from_date': start.isoformat(),
            'to_date': end.isoformat(),
            'conditions': conditions,
            'having': having,
            'groupby': groupby,
            'totals': totals,
            'project': project_ids,
            'aggregations': aggregations,
            'granularity': rollup,
            'issues': issues,
            'arrayjoin': arrayjoin,
            'limit': limit,
            'orderby': orderby,
            'selected_columns': selected_columns,
        }) if v is not None
    }

    headers = {}
    if referrer:
        headers['referer'] = referrer

    try:
        with timer('snuba_query'):
            response = _snuba_pool.urlopen('POST', '/query',
                                           body=json.dumps(request),
                                           headers=headers)
    except urllib3.exceptions.HTTPError as err:
        raise SnubaError(err)

    try:
        body = json.loads(response.data)
    except ValueError:
        raise SnubaError("Could not decode JSON response: {}".format(
            response.data))

    if response.status != 200:
        if body.get('error'):
            raise SnubaError(body['error'])
        else:
            raise SnubaError('HTTP {}'.format(response.status))

    # Forward and reverse translation maps from model ids to snuba keys, per column
    body['data'] = [reverse(d) for d in body['data']]
    return body
def test_name_hasnt_changed_on_init(self):
    """Right after construction, the tracked "name" field reads as unchanged."""
    organization = Organization(id=1, name="bar")
    update_tracked_data(organization)
    self.assertFalse(has_changed(organization, "name"))
def handle_basic_auth(self, request, **kwargs):
    """Render and process the combined login / register / SSO form.

    On a valid registration POST: creates the user, logs them in, accepts
    any pending invite (or joins the single org in single-org mode), and
    redirects.  On a valid login POST: logs in (rate-limited per username)
    and redirects.  Otherwise re-renders the form with errors.
    """
    can_register = self.can_register(request)

    op = request.POST.get("op")
    organization = kwargs.pop("organization", None)

    if not op:
        # Detect that we are on the register page by url /register/ and
        # then activate the register tab by default.
        if "/register" in request.path_info and can_register:
            op = "register"
        elif request.GET.get("op") == "sso":
            op = "sso"

    login_form = self.get_login_form(request)
    if can_register:
        register_form = self.get_register_form(
            request, initial={"username": request.session.get("invite_email", "")})
    else:
        register_form = None

    if can_register and register_form.is_valid():
        user = register_form.save()
        user.send_confirm_emails(is_new_user=True)
        user_signup.send_robust(sender=self, user=user,
                                source="register-form", referrer="in-app")

        # HACK: grab whatever the first backend is and assume it works
        user.backend = settings.AUTHENTICATION_BACKENDS[0]

        auth.login(
            request, user,
            organization_id=organization.id if organization else None)

        # can_register should only allow a single registration
        request.session.pop("can_register", None)
        request.session.pop("invite_email", None)

        # Attempt to directly accept any pending invites
        invite_helper = ApiInviteHelper.from_cookie(request=request, instance=self)

        # In single org mode, associate the user to the only organization.
        #
        # XXX: Only do this if there isn't a pending invitation. The user
        # may need to configure 2FA in which case, we don't want to make
        # the association for them.
        if settings.SENTRY_SINGLE_ORGANIZATION and not invite_helper:
            organization = Organization.get_default()
            OrganizationMember.objects.create(
                organization=organization, role=organization.default_role, user=user)

        if invite_helper and invite_helper.valid_request:
            invite_helper.accept_invite()
            response = self.redirect_to_org(request)
            # Invite consumed; drop the cookie so it is not re-processed.
            remove_invite_cookie(request, response)
            return response

        return self.redirect(auth.get_login_redirect(request))

    elif request.method == "POST":
        from sentry.app import ratelimiter
        from sentry.utils.hashlib import md5_text

        login_attempt = (op == "login" and request.POST.get("username")
                         and request.POST.get("password"))

        # Rate-limit by (hashed) username to slow down credential stuffing.
        if login_attempt and ratelimiter.is_limited(
                "auth:login:username:{}".format(
                    md5_text(
                        login_form.clean_username(
                            request.POST["username"])).hexdigest()),
                limit=10,
                window=60,  # 10 per minute should be enough for anyone
        ):
            login_form.errors["__all__"] = [
                "You have made too many login attempts. Please try again later."
            ]
            metrics.incr("login.attempt", instance="rate_limited",
                         skip_internal=True, sample_rate=1.0)
        elif login_form.is_valid():
            user = login_form.get_user()

            auth.login(
                request, user,
                organization_id=organization.id if organization else None)
            metrics.incr("login.attempt", instance="success",
                         skip_internal=True, sample_rate=1.0)

            if not user.is_active:
                return self.redirect(reverse("sentry-reactivate-account"))

            return self.redirect(auth.get_login_redirect(request))
        else:
            metrics.incr("login.attempt", instance="failure",
                         skip_internal=True, sample_rate=1.0)

    # Fall through: render the form (fresh GET, invalid POST, or rate limit).
    context = {
        "op": op or "login",
        "server_hostname": get_server_hostname(),
        "login_form": login_form,
        "organization": organization,
        "register_form": register_form,
        "CAN_REGISTER": can_register,
        "join_request_link": self.get_join_request_link(organization),
    }
    context.update(additional_context.run_callbacks(request))

    return self.respond_login(request, context, **kwargs)
def get(self, request):
    # Mail-preview endpoint: renders the release-activity email with
    # hard-coded sample data (no database writes).
    org = Organization(
        id=1,
        slug='organization',
        name='My Company',
    )
    projects = [
        Project(
            id=1,
            organization=org,
            slug='project',
            name='My Project',
        ),
        Project(
            id=2,
            organization=org,
            slug='another-project',
            name='Another Project',
        ),
        Project(
            id=3,
            organization=org,
            slug='yet-another-project',
            name='Yet Another Project',
        ),
    ]
    release = Release(organization_id=org.id,
                      version='6c998f755f304593a4713abd123eaf8833a2de5e',
                      date_added=datetime.datetime(2016, 10, 12, 15, 39,
                                                   tzinfo=pytz.utc))
    deploy = Deploy(
        release=release,
        organization_id=org.id,
        environment_id=1,
        date_finished=datetime.datetime(2016, 10, 12, 15, 39, tzinfo=pytz.utc),
    )
    # sentry10 UI uses org-scoped release URLs; older UI uses per-project ones.
    # Either way one link is produced per project (the first comprehension
    # intentionally ignores `p` — every project shares the org-level link).
    has_new_links = features.has('organizations:sentry10', org)
    if has_new_links:
        release_links = [
            absolute_uri(u'/organizations/{}/releases/{}/'.format(
                org.slug,
                release.version,
            )) for p in projects
        ]
    else:
        release_links = [
            absolute_uri(u'/{}/{}/releases/{}/'.format(
                org.slug,
                p.slug,
                release.version,
            )) for p in projects
        ]
    # Sample repos: each entry pairs a Commit with its resolved User (or None).
    repos = [{
        'name': 'getsentry/getsentry',
        'commits': [
            (Commit(
                key='48b86fcd677da3dba5679d7a738240ce6fb74b20',
                date_added=datetime.datetime(2016, 10, 11, 15, 39, tzinfo=pytz.utc),
            ), None),
            (Commit(
                key='a53a2756bb8d111b43196210b34df90b87ed336b',
                message='Fix billing',
                author=CommitAuthor(
                    name='David Cramer',
                    email='*****@*****.**',
                ),
                date_added=datetime.datetime(2016, 10, 11, 16, 45, tzinfo=pytz.utc),
            ), User(email='*****@*****.**', name='David Cramer')),
        ],
    }, {
        'name': 'getsentry/sentry',
        'commits': [
            (Commit(
                key='3c8eb3b4af6ee2a29c68daa188fc730c8e4b39fd',
                date_added=datetime.datetime(2016, 10, 10, 15, 39, tzinfo=pytz.utc),
            ), None),
            (Commit(
                key='373562702009df1692da6eb80a933139f29e094b',
                message='Fix padding',
                author=CommitAuthor(
                    name='Chris Jennings',
                    email='*****@*****.**',
                ),
                date_added=datetime.datetime(2016, 10, 10, 16, 39, tzinfo=pytz.utc),
            ), None),
            (Commit(
                key='631cd9096bd9811a046a472bb0aa8b573e86e1f1',
                message='Update README.rst',
                author=CommitAuthor(
                    name='David Cramer',
                    email='*****@*****.**',
                ),
                date_added=datetime.datetime(2016, 10, 11, 10, 39, tzinfo=pytz.utc),
            ), User(email='*****@*****.**', name='David Cramer')),
        ],
    }]

    return MailPreview(
        html_template='sentry/emails/activity/release.html',
        text_template='sentry/emails/activity/release.txt',
        context={
            'release': release,
            # zip projects with their links and fixed new-group counts
            'projects': zip(projects, release_links, [6, 1, 0]),
            'repos': repos,
            'reason': GroupSubscriptionReason.descriptions[
                GroupSubscriptionReason.committed],
            'project_count': len(projects),
            'commit_count': 4,
            'author_count': 1,
            'file_count': 5,
            'environment': 'production',
            'deploy': deploy,
            'setup_repo_link': absolute_uri('/organizations/{}/repos/'.format(
                org.slug, )),
        },
    ).render(request)
def test_resolve():
    """Unknown strings resolve to None; an indexed string resolves to its id."""
    org = Organization()
    unknown = INDEXER.resolve(org, UseCase.METRIC, "what")
    known = INDEXER.resolve(org, UseCase.METRIC, "user")
    assert unknown is None
    assert known == 11
def test_resolve():
    """Unknown strings resolve to None; "user" maps to the seeded id 11."""
    org_id = Organization().id
    missing = INDEXER.resolve(org_id, "what")
    present = INDEXER.resolve(org_id, "user")
    assert missing is None
    assert present == 11
def get(self, request):
    # Mail-preview endpoint: renders the release-activity email from
    # in-memory fixture objects (nothing is persisted).
    org = Organization(
        id=1,
        slug='organization',
        name='My Company',
    )
    team = Team(
        id=1,
        slug='team',
        name='My Team',
        organization=org,
    )
    project = Project(
        id=1,
        organization=org,
        team=team,
        slug='project',
        name='My Project',
    )
    release = Release(project=project,
                      organization_id=project.organization_id,
                      version='6c998f755f304593a4713abd123eaf8833a2de5e',
                      date_added=datetime(2016, 10, 12, 15, 39, tzinfo=pytz.utc))
    release_link = absolute_uri('/{}/{}/releases/{}/'.format(
        org.slug,
        project.slug,
        release.version,
    ))
    project_link = absolute_uri('/{}/{}/'.format(
        org.slug,
        project.slug,
    ))
    commit_list = [
        Commit(key='48b86fcd677da3dba5679d7a738240ce6fb74b20'),
        Commit(key='a53a2756bb8d111b43196210b34df90b87ed336b',
               message='Update README.rst',
               author=CommitAuthor(
                   name='David Cramer',
                   email='*****@*****.**',
               )),
    ]

    return MailPreview(
        html_template='sentry/emails/activity/release.html',
        text_template='sentry/emails/activity/release.txt',
        context={
            'release': release,
            'project': project,
            'release_link': release_link,
            'project_link': project_link,
            'commit_list': commit_list,
            'reason': GroupSubscriptionReason.descriptions[
                GroupSubscriptionReason.committed],
        },
    ).render(request)
def _prepare_query_params(query_params):
    """Turn a SnubaQueryParams object into the kwargs dict for a snuba request.

    Returns (kwargs, forward, reverse) where forward/reverse are the
    model-id <-> snuba-key translators for the query's filter keys.
    Raises UnqualifiedQueryError, QueryOutsideRetentionError, or
    QueryOutsideGroupActivityError when the query cannot be satisfied.
    """
    # convert to naive UTC datetimes, as Snuba only deals in UTC
    # and this avoids offset-naive and offset-aware issues
    start = naiveify_datetime(query_params.start)
    end = naiveify_datetime(query_params.end)

    with timer("get_snuba_map"):
        forward, reverse = get_snuba_translators(
            query_params.filter_keys,
            is_grouprelease=query_params.is_grouprelease)

    # Pick the strategy for resolving the owning organization (and the
    # extra request params) based on the target dataset.
    if query_params.dataset in [
            Dataset.Events,
            Dataset.Discover,
            Dataset.Sessions,
            Dataset.Transactions,
    ]:
        (organization_id,
         params_to_update) = get_query_params_to_update_for_projects(
             query_params, with_org=query_params.dataset == Dataset.Sessions)
    elif query_params.dataset in [Dataset.Outcomes, Dataset.OutcomesRaw]:
        (organization_id, params_to_update
         ) = get_query_params_to_update_for_organizations(query_params)
    else:
        raise UnqualifiedQueryError(
            "No strategy found for getting an organization for the given dataset."
        )

    query_params.kwargs.update(params_to_update)

    # Convert translated filter_keys into IN / IS NULL conditions.
    for col, keys in six.iteritems(forward(deepcopy(
            query_params.filter_keys))):
        if keys:
            if len(keys) == 1 and None in keys:
                query_params.conditions.append((col, "IS NULL", None))
            else:
                query_params.conditions.append((col, "IN", keys))

    # Clamp the window to the organization's retention period.
    retention = quotas.get_event_retention(
        organization=Organization(organization_id))
    if retention:
        start = max(start, datetime.utcnow() - timedelta(days=retention))
        if start > end:
            raise QueryOutsideRetentionError(
                "Invalid date range. Please try a more recent date range.")

    # if `shrink_time_window` pushed `start` after `end` it means the user queried
    # a Group for T1 to T2 when the group was only active for T3 to T4, so the query
    # wouldn't return any results anyway
    new_start = shrink_time_window(query_params.filter_keys.get("group_id"),
                                   start)
    # TODO (alexh) this is a quick emergency fix for an occasion where a search
    # results in only 1 django candidate, which is then passed to snuba to
    # check and we raised because of it. Remove this once we figure out why the
    # candidate was returned from django at all if it existed only outside the
    # time range of the query
    if new_start <= end:
        start = new_start

    if start > end:
        raise QueryOutsideGroupActivityError

    query_params.kwargs.update({
        "dataset": query_params.dataset.value,
        "from_date": start.isoformat(),
        "to_date": end.isoformat(),
        "groupby": query_params.groupby,
        "conditions": query_params.conditions,
        "aggregations": query_params.aggregations,
        "granularity": query_params.rollup,  # TODO name these things the same
    })
    # Drop None values so snuba applies its own defaults, then force overrides.
    kwargs = {
        k: v
        for k, v in six.iteritems(query_params.kwargs) if v is not None
    }
    kwargs.update(OVERRIDE_OPTIONS)
    return kwargs, forward, reverse
def digest(request): random = get_random(request) # TODO: Refactor all of these into something more manageable. org = Organization(id=1, slug="example", name="Example Organization") project = Project(id=1, slug="example", name="Example Project", organization=org) rules = { i: Rule(id=i, project=project, label="Rule #%s" % (i,)) for i in range(1, random.randint(2, 4)) } state = { "project": project, "groups": {}, "rules": rules, "event_counts": {}, "user_counts": {}, } records = [] event_sequence = itertools.count(1) group_generator = make_group_generator(random, project) for i in range(random.randint(1, 30)): group = next(group_generator) state["groups"][group.id] = group offset = timedelta(seconds=0) for i in range(random.randint(1, 10)): offset += timedelta(seconds=random.random() * 120) event = Event( id=next(event_sequence), event_id=uuid.uuid4().hex, project=project, group=group, message=group.message, data=load_data("python"), datetime=to_datetime( random.randint(to_timestamp(group.first_seen), to_timestamp(group.last_seen)) ), ) records.append( Record( event.event_id, Notification( event, random.sample(state["rules"], random.randint(1, len(state["rules"]))) ), to_timestamp(event.datetime), ) ) state["event_counts"][group.id] = random.randint(10, 1e4) state["user_counts"][group.id] = random.randint(10, 1e4) digest = build_digest(project, records, state) start, end, counts = get_digest_metadata(digest) context = { "project": project, "counts": counts, "digest": digest, "start": start, "end": end, "referrer": "digest_email", } add_unsubscribe_link(context) return MailPreview( html_template="sentry/emails/digests/body.html", text_template="sentry/emails/digests/body.txt", context=context, ).render(request)
def new_event(request):
    """Mail-preview endpoint: render the error-alert email for a sample event.

    The event platform can be chosen via the ``platform`` query parameter
    (defaults to ``python``).  Fix: the context dict previously contained the
    key ``'tags'`` twice — the first entry (``event.get_tags()``) was dead
    code because the later literal always overwrote it; the duplicate has
    been removed, preserving the rendered output.
    """
    platform = request.GET.get('platform', 'python')
    org = Organization(
        id=1,
        slug='example',
        name='Example',
    )
    team = Team(
        id=1,
        slug='example',
        name='Example',
        organization=org,
    )
    project = Project(
        id=1,
        slug='example',
        name='Example',
        team=team,
        organization=org,
    )
    random = get_random(request)
    group = next(make_group_generator(random, project))
    event = Event(
        id=1,
        project=project,
        group=group,
        message=group.message,
        data=load_data(platform),
        # pick a timestamp somewhere within the group's activity window
        datetime=to_datetime(
            random.randint(
                to_timestamp(group.first_seen),
                to_timestamp(group.last_seen),
            ),
        ),
    )

    rule = Rule(label="An example rule")

    # Collect (title, html) pairs for every interface that renders a body.
    interface_list = []
    for interface in event.interfaces.itervalues():
        body = interface.to_email_html(event)
        if not body:
            continue
        interface_list.append((interface.get_title(), mark_safe(body)))

    return MailPreview(
        html_template='sentry/emails/error.html',
        text_template='sentry/emails/error.txt',
        context={
            'rule': rule,
            'group': group,
            'event': event,
            'link': 'http://example.com/link',
            'interfaces': interface_list,
            'project_label': project.name,
            # fixed sample tags (previously duplicated with event.get_tags())
            'tags': [('logger', 'javascript'), ('environment', 'prod'),
                     ('level', 'error'), ('device', 'Other')]
        },
    ).render(request)
def test_no_conflict(self):
    """With no colliding rows, slugify_instance uses the name verbatim."""
    organization = Organization(name='matt')
    slugify_instance(organization, 'matt')
    assert organization.slug == 'matt'
    assert not Organization.objects.filter(slug='matt').exists()
def alert(request):
    # Mail-preview endpoint: renders the error-alert email from a normalized,
    # saved sample event (platform selectable via the ``platform`` GET param).
    platform = request.GET.get("platform", "python")
    org = Organization(id=1, slug="example", name="Example")
    project = Project(id=1, slug="example", name="Example", organization=org)

    random = get_random(request)
    group = next(make_group_generator(random, project))

    data = dict(load_data(platform))
    data["message"] = group.message
    data["event_id"] = "44f1419e73884cd2b45c79918f4b6dc4"
    data.pop("logentry", None)
    data["environment"] = "prod"
    data["tags"] = [
        ("logger", "javascript"),
        ("environment", "prod"),
        ("level", "error"),
        ("device", "Other"),
    ]

    # Run the payload through the real ingestion pipeline so the preview
    # matches what a stored event would look like.
    event_manager = EventManager(data)
    event_manager.normalize()
    data = event_manager.get_data()
    event = event_manager.save(project.id)
    # Prevent CI screenshot from constantly changing
    event.data["timestamp"] = 1504656000.0  # datetime(2017, 9, 6, 0, 0)
    event_type = get_event_type(event.data)

    group.message = event.search_message
    group.data = {"type": event_type.key, "metadata": event_type.get_metadata(data)}

    rule = Rule(label="An example rule")

    # XXX: this interface_list code needs to be the same as in
    # src/sentry/mail/adapter.py
    interface_list = []
    for interface in event.interfaces.values():
        body = interface.to_email_html(event)
        if not body:
            continue
        text_body = interface.to_string(event)
        interface_list.append((interface.get_title(), mark_safe(body), text_body))

    return MailPreview(
        html_template="sentry/emails/error.html",
        text_template="sentry/emails/error.txt",
        context={
            "rule": rule,
            "group": group,
            "event": event,
            "timezone": pytz.timezone("Europe/Vienna"),
            "link": "http://example.com/link",
            "interfaces": interface_list,
            "tags": event.tags,
            "project_label": project.slug,
            "commits": [
                {
                    # TODO(dcramer): change to use serializer
                    "repository": {
                        "status": "active",
                        "name": "Example Repo",
                        "url": "https://github.com/example/example",
                        "dateCreated": "2018-02-28T23:39:22.402Z",
                        "provider": {"id": "github", "name": "GitHub"},
                        "id": "1",
                    },
                    "score": 2,
                    "subject": "feat: Do something to raven/base.py",
                    "message": "feat: Do something to raven/base.py\naptent vivamus vehicula tempus volutpat hac tortor",
                    "id": "1b17483ffc4a10609e7921ee21a8567bfe0ed006",
                    "shortId": "1b17483",
                    "author": {
                        "username": "******",
                        "isManaged": False,
                        "lastActive": "2018-03-01T18:25:28.149Z",
                        "id": "1",
                        "isActive": True,
                        "has2fa": False,
                        "name": "*****@*****.**",
                        "avatarUrl": "https://secure.gravatar.com/avatar/51567a4f786cd8a2c41c513b592de9f9?s=32&d=mm",
                        "dateJoined": "2018-02-27T22:04:32.847Z",
                        "emails": [{"is_verified": False, "id": "1", "email": "*****@*****.**"}],
                        "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                        "lastLogin": "******",
                        "email": "*****@*****.**",
                    },
                }
            ],
        },
    ).render(request)
def report(request):
    # Mail-preview endpoint: renders the weekly report email from randomly
    # generated organizations/projects/releases (Python 2 era: uses xrange).
    from sentry.tasks import reports

    random = get_random(request)

    duration = 60 * 60 * 24 * 7
    # Pick a random, day-aligned report timestamp between mid-2015 and mid-2016.
    timestamp = to_timestamp(
        reports.floor_to_utc_day(
            to_datetime(
                random.randint(
                    to_timestamp(datetime(2015, 6, 1, 0, 0, 0, tzinfo=timezone.utc)),
                    to_timestamp(datetime(2016, 7, 1, 0, 0, 0, tzinfo=timezone.utc)),
                )
            )
        )
    )

    start, stop = interval = reports._to_interval(timestamp, duration)

    organization = Organization(
        id=1,
        slug='example',
        name='Example',
    )
    team = Team(
        id=1,
        slug='example',
        name='Example',
        organization=organization,
    )

    projects = []
    for i in xrange(0, random.randint(1, 8)):
        name = ' '.join(
            random.sample(
                WORDS,
                random.randint(1, 4)
            )
        )
        projects.append(
            Project(
                id=i,
                organization=organization,
                team=team,
                slug=slugify(name),
                name=name,
                date_added=start - timedelta(days=random.randint(0, 120)),
            )
        )

    def make_release_generator():
        # Infinite stream of fake releases dated within 30 days of `timestamp`.
        id_sequence = itertools.count(1)
        while True:
            dt = to_datetime(
                random.randint(
                    timestamp - (30 * 24 * 60 * 60),
                    timestamp,
                ),
            )
            yield Release(
                id=next(id_sequence),
                project=random.choice(projects),
                version=''.join([
                    random.choice('0123456789abcdef') for _ in range(40)
                ]),
                date_added=dt,
                date_started=dt,
            )

    release_instances = {}

    def make_release_id_generator():
        # Yields release ids while caching the instances in release_instances.
        release_generator = make_release_generator()
        while True:
            release = next(release_generator)
            release_instances[release.id] = release
            yield release.id

    release_id_generator = make_release_id_generator()

    def build_issue_summaries():
        # Three heavy-tailed random issue counts.
        summaries = []
        for i in range(3):
            summaries.append(
                int(random.weibullvariate(10, 1) * random.paretovariate(0.5))
            )
        return summaries

    def build_release_list():
        return reports.trim_release_list([
            (
                next(release_id_generator),
                max(1, int(random.weibullvariate(20, 0.15))),
            ) for _ in range(random.randint(0, 10))
        ])

    def build_usage_summary():
        return (
            int(random.weibullvariate(3, 1) * random.paretovariate(0.2)),
            int(random.weibullvariate(5, 1) * random.paretovariate(0.2)),
        )

    def build_calendar_data(project):
        start, stop = reports.get_calendar_query_range(interval, 3)
        rollup = 60 * 60 * 24
        series = []

        weekend = frozenset((5, 6))
        value = int(random.weibullvariate(5000, 3))
        for timestamp in tsdb.get_optimal_rollup_series(start, stop, rollup)[1]:
            # NOTE(review): `.weekday` is not called here — a bound method is
            # never `in` a frozenset of ints, so this condition is always
            # False and the weekend damping never applies. Likely should be
            # `.weekday()`.
            damping = random.uniform(0.2, 0.6) if to_datetime(timestamp).weekday in weekend else 1
            jitter = random.paretovariate(1.2)
            series.append((timestamp, int(value * damping * jitter)))
            value = value * random.uniform(0.25, 2)

        return reports.clean_calendar_data(
            project,
            series,
            start,
            stop,
            rollup,
            stop
        )

    def build_report(project):
        daily_maximum = random.randint(1000, 10000)

        rollup = 60 * 60 * 24
        series = [(
            timestamp + (i * rollup),
            (random.randint(0, daily_maximum), random.randint(0, daily_maximum))
        ) for i in xrange(0, 7)]

        aggregates = [
            random.randint(0, daily_maximum * 7) if random.random() < 0.9 else None
            for _ in xrange(0, 4)
        ]

        return reports.Report(
            series,
            aggregates,
            build_issue_summaries(),
            build_release_list(),
            build_usage_summary(),
            build_calendar_data(project),
        )

    if random.random() < 0.85:
        personal = {
            'resolved': random.randint(0, 100),
            'users': int(random.paretovariate(0.2)),
        }
    else:
        personal = {
            'resolved': 0,
            'users': 0,
        }

    return MailPreview(
        html_template='sentry/emails/reports/body.html',
        text_template='sentry/emails/reports/body.txt',
        context={
            'duration': reports.durations[duration],
            'interval': {
                'start': reports.date_format(start),
                'stop': reports.date_format(stop),
            },
            'report': reports.to_context(
                organization,
                interval,
                {project: build_report(project) for project in projects}
            ),
            'organization': organization,
            'personal': personal,
            'user': request.user,
        },
    ).render(request)
def report(request):
    """Mail-preview endpoint: render the weekly report email from random data.

    Fix: in ``build_calendar_data`` the original tested
    ``to_datetime(timestamp).weekday in weekend`` — ``weekday`` is a method
    and was never called, so the membership test was always False and the
    weekend damping factor was dead code.  It now calls ``.weekday()``
    (Saturday=5, Sunday=6) so weekend buckets are actually damped.
    """
    from sentry.tasks import reports

    random = get_random(request)

    duration = 60 * 60 * 24 * 7
    # Pick a random, day-aligned report timestamp between mid-2015 and mid-2016.
    timestamp = to_timestamp(
        reports.floor_to_utc_day(
            to_datetime(
                random.randint(
                    to_timestamp(datetime(2015, 6, 1, 0, 0, 0, tzinfo=timezone.utc)),
                    to_timestamp(datetime(2016, 7, 1, 0, 0, 0, tzinfo=timezone.utc)),
                )
            )
        )
    )
    start, stop = interval = reports._to_interval(timestamp, duration)

    organization = Organization(id=1, slug="example", name="Example")
    projects = []
    for i in range(0, random.randint(1, 8)):
        name = " ".join(random.sample(loremipsum.words, random.randint(1, 4)))
        projects.append(
            Project(
                id=i,
                organization=organization,
                slug=slugify(name),
                name=name,
                date_added=start - timedelta(days=random.randint(0, 120)),
            )
        )

    def make_release_generator():
        # Infinite stream of fake releases dated within 30 days of `timestamp`.
        id_sequence = itertools.count(1)
        while True:
            dt = to_datetime(random.randint(timestamp - (30 * 24 * 60 * 60), timestamp))
            p = random.choice(projects)
            yield Release(
                id=next(id_sequence),
                project=p,
                organization_id=p.organization_id,
                version="".join(random.choice("0123456789abcdef") for _ in range(40)),
                date_added=dt,
            )

    def build_issue_summaries():
        # Three heavy-tailed random issue counts.
        summaries = []
        for i in range(3):
            summaries.append(int(random.weibullvariate(10, 1) * random.paretovariate(0.5)))
        return summaries

    def build_usage_outcomes():
        return (
            int(random.weibullvariate(3, 1) * random.paretovariate(0.2)),
            int(random.weibullvariate(3, 1) * random.paretovariate(0.2)),
            int(random.weibullvariate(3, 1) * random.paretovariate(0.2)),
            int(random.weibullvariate(5, 1) * random.paretovariate(0.2)),
        )

    def build_calendar_data(project):
        start, stop = reports.get_calendar_query_range(interval, 3)
        rollup = 60 * 60 * 24
        series = []

        weekend = frozenset((5, 6))
        value = int(random.weibullvariate(5000, 3))
        for timestamp in tsdb.get_optimal_rollup_series(start, stop, rollup)[1]:
            # weekday() returns 5/6 for Sat/Sun; damp weekend traffic.
            damping = (
                random.uniform(0.2, 0.6)
                if to_datetime(timestamp).weekday() in weekend
                else 1
            )
            jitter = random.paretovariate(1.2)
            series.append((timestamp, int(value * damping * jitter)))
            value = value * random.uniform(0.25, 2)

        return reports.clean_calendar_data(project, series, start, stop, rollup, stop)

    def build_report(project):
        daily_maximum = random.randint(1000, 10000)

        rollup = 60 * 60 * 24
        series = [
            (
                timestamp + (i * rollup),
                (random.randint(0, daily_maximum), random.randint(0, daily_maximum)),
            )
            for i in range(0, 7)
        ]

        aggregates = [
            random.randint(0, daily_maximum * 7) if random.random() < 0.9 else None
            for _ in range(0, 4)
        ]

        return reports.Report(
            series,
            aggregates,
            build_issue_summaries(),
            build_usage_outcomes(),
            build_calendar_data(project),
        )

    if random.random() < 0.85:
        personal = {"resolved": random.randint(0, 100), "users": int(random.paretovariate(0.2))}
    else:
        personal = {"resolved": 0, "users": 0}

    return MailPreview(
        html_template="sentry/emails/reports/body.html",
        text_template="sentry/emails/reports/body.txt",
        context={
            "duration": reports.durations[duration],
            "interval": {"start": reports.date_format(start), "stop": reports.date_format(stop)},
            "report": reports.to_context(
                organization, interval, {project: build_report(project) for project in projects}
            ),
            "organization": organization,
            "personal": personal,
            "user": request.user,
        },
    ).render(request)
def createuser(email, password, superuser, no_password, no_input):
    "Create a new user."
    # Interactive mode: prompt for anything the caller left unspecified.
    if not no_input:
        if not email:
            email = _get_email()
        if not (password or no_password):
            password = _get_password()
        if superuser is None:
            superuser = _get_superuser()

    # Tri-state flag: an unanswered prompt falls back to a regular user.
    superuser = False if superuser is None else superuser

    if not email:
        raise click.ClickException('Invalid or missing email address.')

    # TODO(mattrobenolt): Accept password over stdin?
    if not no_password and not password:
        raise click.ClickException('No password set and --no-password not passed.')

    from sentry import roles
    from sentry.models import User
    from django.conf import settings

    account = User(
        email=email,
        username=email,
        is_superuser=superuser,
        is_staff=superuser,
        is_active=True,
    )
    if password:
        account.set_password(password)
    account.save()

    click.echo('User created: %s' % (email, ))

    # TODO(dcramer): kill this when we improve flows
    if settings.SENTRY_SINGLE_ORGANIZATION:
        from sentry.models import (Organization, OrganizationMember,
                                   OrganizationMemberTeam, Team)

        default_org = Organization.get_default()
        role = roles.get_top_dog().id if superuser else default_org.default_role
        membership = OrganizationMember.objects.create(
            organization=default_org,
            user=account,
            role=role,
        )

        # if we've only got a single team let's go ahead and give
        # access to that team as its likely the desired outcome
        candidate_teams = list(Team.objects.filter(organization=default_org)[0:2])
        if len(candidate_teams) == 1:
            OrganizationMemberTeam.objects.create(
                team=candidate_teams[0],
                organizationmember=membership,
            )
        click.echo('Added to organization: %s' % (default_org.slug, ))
from sentry.utils.runner import configure
configure()  # bootstrap Django settings before importing models

from sentry.models import Team, Project, ProjectKey, User, Organization

# Bootstrap script: create an admin superuser, a default
# organization/team/project, then fetch the project's first DSN key.
user = User()
user.username = '******'
user.email = 'admin@localhost'
user.is_superuser = True
user.set_password('admin')
user.save()

organization = Organization()
organization.name = 'MyOrg'
organization.owner = user
organization.save()

team = Team()
team.name = 'Sentry'
team.organization = organization
team.owner = user
team.save()

project = Project()
project.team = team
project.name = 'Default'
project.organization = organization
project.save()

# NOTE(review): this handle is opened but never closed in this snippet —
# presumably the DSN is written further down; confirm and prefer a `with` block.
f = open('dsn', 'wb')
key = ProjectKey.objects.filter(project=project)[0]
# Do something crazy from sentry.models import ( Team, Project, ProjectKey, User, Organization, OrganizationMember, OrganizationMemberTeam ) DEFAULT_ORGANIZATION = 'Default' DEFAULT_TEAM = 'Default' DEFAULT_PROJECT = 'Default' organizations = Organization.objects.filter(name=DEFAULT_ORGANIZATION) if organizations.count(): organization = organizations.first() else: organization = Organization() organization.name = DEFAULT_ORGANIZATION organization.save() teams = Team.objects.filter(name=DEFAULT_TEAM) if teams.count(): team = teams.first() else: team = Team() team.name = DEFAULT_TEAM team.organization = organization team.save() projects = Project.objects.filter(name=DEFAULT_PROJECT) if projects.count(): project = projects.first()
#!/srv/sentry/application/current/bin/python {% from "sentry/map.jinja" import sentry with context %} from sentry.utils.runner import configure configure() from sentry.models import Team, Project, ProjectKey, User, Organization user = User() user.username = '******' user.email = '{{sentry.bootstrap.email}}' user.is_superuser = True user.set_password('{{sentry.bootstrap.password}}') user.save() organization = Organization() organization.name = '{{sentry.bootstrap.organization}}' organization.owner = user organization.save() team = Team() team.organization = organization team.name = '{{sentry.bootstrap.team}}' team.owner = user team.save() project = Project() project.organization = organization project.team = team project.owner = user project.name = '{{sentry.bootstrap.project}}'
def test_get_scope_organization(self):
    """Passing an organization yields the ORGANIZATION scope with the org's id."""
    target = Organization(id=1)
    scope_type, scope_identifier = get_scope(self.user, organization=target)
    assert scope_type == NotificationScopeType.ORGANIZATION
    assert scope_identifier == target.id
def serialize(  # type: ignore
    self, obj: Organization, attrs: Mapping[str, Any], user: User, access: Access
) -> DetailedOrganizationSerializerResponse:
    """Build the detailed serialized form of organization *obj*.

    Extends the parent serializer's payload with experiment assignments,
    quota limits, option-backed settings, trusted relays, the caller's
    access scopes/role, pending access requests, and onboarding tasks.
    """
    # TODO: rectify access argument overriding parent if we want to remove above type ignore
    from sentry import experiments
    onboarding_tasks = list(
        OrganizationOnboardingTask.objects.filter(organization=obj).select_related("user")
    )
    experiment_assignments = experiments.all(org=obj, actor=user)
    # Start from the parent serializer's payload and layer details on top.
    context = cast(DetailedOrganizationSerializerResponse, super().serialize(obj, attrs, user))
    max_rate = quotas.get_maximum_quota(obj)
    context["experiments"] = experiment_assignments
    # Rate/account/project limits; account and project limits come from
    # organization options with library defaults as fallback.
    context["quota"] = {
        "maxRate": max_rate[0],
        "maxRateInterval": max_rate[1],
        "accountLimit": int(
            OrganizationOption.objects.get_value(
                organization=obj,
                key="sentry:account-rate-limit",
                default=ACCOUNT_RATE_LIMIT_DEFAULT,
            )
        ),
        "projectLimit": int(
            OrganizationOption.objects.get_value(
                organization=obj,
                key="sentry:project-rate-limit",
                default=PROJECT_RATE_LIMIT_DEFAULT,
            )
        ),
    }
    # Flags and option-backed organization settings, normalized to their
    # external (camelCase) names and coerced to plain bool/str/list values.
    context.update(
        {
            "isDefault": obj.is_default,
            "defaultRole": obj.default_role,
            "availableRoles": [{"id": r.id, "name": r.name} for r in roles.get_all()],
            "openMembership": bool(obj.flags.allow_joinleave),
            "require2FA": bool(obj.flags.require_2fa),
            # Requires both the feature flag and the organization flag.
            "requireEmailVerification": bool(
                features.has("organizations:required-email-verification", obj)
                and obj.flags.require_email_verification
            ),
            "allowSharedIssues": not obj.flags.disable_shared_issues,
            "enhancedPrivacy": bool(obj.flags.enhanced_privacy),
            "dataScrubber": bool(
                obj.get_option("sentry:require_scrub_data", REQUIRE_SCRUB_DATA_DEFAULT)
            ),
            "dataScrubberDefaults": bool(
                obj.get_option("sentry:require_scrub_defaults", REQUIRE_SCRUB_DEFAULTS_DEFAULT)
            ),
            "sensitiveFields": obj.get_option(
                "sentry:sensitive_fields", SENSITIVE_FIELDS_DEFAULT
            )
            or [],
            "safeFields": obj.get_option("sentry:safe_fields", SAFE_FIELDS_DEFAULT) or [],
            "storeCrashReports": convert_crashreport_count(
                obj.get_option("sentry:store_crash_reports")
            ),
            "attachmentsRole": str(
                obj.get_option("sentry:attachments_role", ATTACHMENTS_ROLE_DEFAULT)
            ),
            "debugFilesRole": str(
                obj.get_option("sentry:debug_files_role", DEBUG_FILES_ROLE_DEFAULT)
            ),
            "eventsMemberAdmin": bool(
                obj.get_option("sentry:events_member_admin", EVENTS_MEMBER_ADMIN_DEFAULT)
            ),
            "alertsMemberWrite": bool(
                obj.get_option("sentry:alerts_member_write", ALERTS_MEMBER_WRITE_DEFAULT)
            ),
            "scrubIPAddresses": bool(
                obj.get_option(
                    "sentry:require_scrub_ip_address", REQUIRE_SCRUB_IP_ADDRESS_DEFAULT
                )
            ),
            "scrapeJavaScript": bool(
                obj.get_option("sentry:scrape_javascript", SCRAPE_JAVASCRIPT_DEFAULT)
            ),
            "allowJoinRequests": bool(
                obj.get_option("sentry:join_requests", JOIN_REQUESTS_DEFAULT)
            ),
            # Empty string collapses to None so clients see "not configured".
            "relayPiiConfig": str(obj.get_option("sentry:relay_pii_config") or "") or None,
        }
    )
    trusted_relays_raw = obj.get_option("sentry:trusted-relays") or []
    # serialize trusted relays info into their external form
    context["trustedRelays"] = [TrustedRelaySerializer(raw).data for raw in trusted_relays_raw]
    context["access"] = access.scopes
    # Role is omitted entirely (not null) when the caller has none.
    if access.role is not None:
        context["role"] = access.role
    context["pendingAccessRequests"] = OrganizationAccessRequest.objects.filter(
        team__organization=obj
    ).count()
    context["onboardingTasks"] = serialize(onboarding_tasks, user, OnboardingTasksSerializer())
    return context
# Point Sentry at its configuration file, then bootstrap the environment.
os.environ['SENTRY_CONF'] = "/home/sentry/sentry.conf.py"

from sentry.runner import configure

configure()

# Create the AgoraVoting org, its team and project when they are missing.
from sentry.models import Team, Project, ProjectKey, User, Organization, OrganizationMember

admin = User.objects.get(pk=1)
name = 'AgoraVoting'
name2 = 'AuthApi'

if Organization.objects.filter(name=name).count() == 0:
    # Organization plus an owner membership for the admin user.
    organization = Organization()
    organization.name = name
    organization.save()

    membership = OrganizationMember()
    membership.organization = organization
    membership.role = 'owner'
    membership.user = admin
    membership.save()

    # Team named after the organization.
    team = Team()
    team.name = name
    team.organization = organization
    team.save()

    # Project setup continues beyond this excerpt.
    project = Project()