Example #1
File: faux.py Project: yaoqi/sentry
    def _kwarg_value(self, key, **kwargs):
        """
        Support a dot notation shortcut for deeply nested dicts or just look
        up the value if passed a normal key.

        >>> self.kwargs = {'foo': {'bar': {'baz': 1}}}
        >>> self._kwarg_value('foo.bar.baz')
        1
        >>> self._kwarg_value('foo')
        {'bar': {'baz': 1}}
        """
        if '.' in key:
            keys = deque(key.split('.'))
        else:
            kwarg = self.kwargs[key]

            if kwargs.get('format') == 'json':
                return json.loads(kwarg)

            return kwarg

        kwarg = dict(self.kwargs)

        if kwargs.get('format') == 'json':
            kwarg = json.loads(kwarg[keys.popleft()])

        while keys:
            kwarg = kwarg[keys.popleft()]

        return kwarg
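
The dot-notation traversal above reduces to a small standalone helper. A minimal sketch, where the `lookup` name and the sample dicts are illustrative stand-ins rather than part of the original class:

import json
from collections import deque

def lookup(kwargs, key, format=None):
    # Split 'foo.bar.baz' into successive dict lookups.
    keys = deque(key.split('.'))
    value = kwargs[keys.popleft()]
    if format == 'json':
        # The first segment may hold a JSON-encoded string.
        value = json.loads(value)
    while keys:
        value = value[keys.popleft()]
    return value

assert lookup({'foo': {'bar': {'baz': 1}}}, 'foo.bar.baz') == 1
assert lookup({'foo': '{"bar": 2}'}, 'foo.bar', format='json') == 2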
Example #2
    def test_compare_commits_no_start(self):
        responses.add(
            responses.GET,
            'https://example.gitlab.com/api/v4/projects/%s/repository/commits/xyz' % self.gitlab_id,
            json={'created_at': '2018-09-19T13:14:15Z'}
        )
        responses.add(
            responses.GET,
            'https://example.gitlab.com/api/v4/projects/%s/repository/commits?until=2018-09-19T13:14:15Z' % self.gitlab_id,
            json=json.loads(COMMIT_LIST_RESPONSE)
        )
        responses.add(
            responses.GET,
            'https://example.gitlab.com/api/v4/projects/%s/repository/commits/ed899a2f4b50b4370feeea94676502b42383c746/diff' % self.gitlab_id,
            json=json.loads(COMMIT_DIFF_RESPONSE)
        )
        responses.add(
            responses.GET,
            'https://example.gitlab.com/api/v4/projects/%s/repository/commits/6104942438c14ec7bd21c6cd5bd995272b3faff6/diff' % self.gitlab_id,
            json=json.loads(COMMIT_DIFF_RESPONSE)
        )

        response = self.create_repository(self.default_repository_config,
                                          self.integration.id)
        repo = Repository.objects.get(pk=response.data['id'])
        commits = self.provider.compare_commits(repo, None, 'xyz')
        for commit in commits:
            assert_commit_shape(commit)
Example #3
    def put(self, request):
        """
        Verify a User
        `````````````

        This endpoint verifies the currently authenticated user (for example, to gain superuser).

        :auth: required
        """
        if not request.user.is_authenticated():
            return Response(status=status.HTTP_401_UNAUTHORIZED)

        validator = AuthVerifyValidator(data=request.DATA)
        if not validator.is_valid():
            return self.respond(validator.errors, status=status.HTTP_400_BAD_REQUEST)

        authenticated = False

        # See if we have a u2f challenge/response
        if 'challenge' in validator.object and 'response' in validator.object:
            try:
                interface = Authenticator.objects.get_interface(request.user, 'u2f')
                if not interface.is_enrolled:
                    raise LookupError()

                challenge = json.loads(validator.object['challenge'])
                response = json.loads(validator.object['response'])
                authenticated = interface.validate_response(request, challenge, response)
            except ValueError:
                pass
            except LookupError:
                pass

        # attempt password authentication
        else:
            authenticated = request.user.check_password(validator.object['password'])

        # The UI treats 401s by redirecting, so return a 403 that the client should ignore
        if not authenticated:
            return Response({'detail': {'code': 'ignore'}}, status=status.HTTP_403_FORBIDDEN)

        try:
            # Must use the real request object that Django knows about
            auth.login(request._request, request.user)
        except auth.AuthUserPasswordExpired:
            return Response(
                {
                    'code': 'password-expired',
                    'message': 'Cannot sign-in with basic auth because password has expired.',
                },
                status=status.HTTP_403_FORBIDDEN
            )

        request.user = request._request.user

        return self.get(request)
Example #4
    def test_resolve_issue(self):
        status_action = {
            'name': 'resolve_dialog',
            'value': 'resolve_dialog',
        }

        # Expect request to open dialog on slack
        responses.add(
            method=responses.POST,
            url='https://slack.com/api/dialog.open',
            body='{"ok": true}',
            status=200,
            content_type='application/json',
        )

        resp = self.post_webhook(action_data=[status_action])
        assert resp.status_code == 200, resp.content

        # Opening dialog should *not* cause the current message to be updated
        assert resp.content == ''

        data = parse_qs(responses.calls[0].request.body)
        assert data['token'][0] == self.integration.metadata['access_token']
        assert data['trigger_id'][0] == self.trigger_id
        assert 'dialog' in data

        dialog = json.loads(data['dialog'][0])
        callback_data = json.loads(dialog['callback_id'])
        assert int(callback_data['issue']) == self.group1.id
        assert callback_data['orig_response_url'] == self.response_url

        # Completing the dialog will update the message
        responses.add(
            method=responses.POST,
            url=self.response_url,
            body='{"ok": true}',
            status=200,
            content_type='application/json',
        )

        resp = self.post_webhook(
            type='dialog_submission',
            callback_id=dialog['callback_id'],
            data={'submission': {'resolve_type': 'resolved'}}
        )
        self.group1 = Group.objects.get(id=self.group1.id)

        assert resp.status_code == 200, resp.content
        assert self.group1.get_status() == GroupStatus.RESOLVED

        update_data = json.loads(responses.calls[1].request.body)

        expect_status = u'*Issue resolved by <@{}>*'.format(self.identity.external_id)
        assert update_data['text'].endswith(expect_status)
Example #5
    def enroll(self, request, interface, insecure=False):
        challenge = request.POST.get('challenge')
        if challenge:
            interface.enrollment_data = json.loads(challenge)

        response = request.POST.get('response')
        if response:
            interface.try_enroll(json.loads(response))
            return TwoFactorSettingsView.enroll(self, request, interface)

        context = self.make_context(request, interface)
        return render_to_response('sentry/account/twofactor/enroll_u2f.html',
                                  context, request)
Example #6
    def handle(self, request):
        user = auth.get_pending_2fa_user(request)
        if user is None or request.user.is_authenticated():
            return HttpResponseRedirect(auth.get_login_url())

        interfaces = Authenticator.objects.all_interfaces_for_user(user)

        # If for whatever reason we ended up here but the user has no 2FA
        # enabled, we just continue successfully.
        if not interfaces:
            return self.perform_signin(request, user)

        challenge = activation = None
        interface = self.negotiate_interface(request, interfaces)
        if request.method == "GET":
            activation = interface.activate(request)
            if activation is not None and activation.type == "challenge":
                challenge = activation.challenge
        elif "challenge" in request.POST:
            challenge = json.loads(request.POST["challenge"])

        form = TwoFactorForm()

        # If an OTP response was supplied, try to validate it.
        otp = request.POST.get("otp")
        if otp:
            used_interface = self.validate_otp(otp, interface, interfaces)
            if used_interface is not None:
                return self.perform_signin(request, user, used_interface)
            self.fail_signin(request, user, form)

        # If both a challenge and a response exist, validate them
        if challenge:
            response = request.POST.get("response")
            if response:
                response = json.loads(response)
                if interface.validate_response(request, challenge, response):
                    return self.perform_signin(request, user, interface)
                self.fail_signin(request, user, form)

        return render_to_response(
            ["sentry/twofactor_%s.html" % interface.interface_id, "sentry/twofactor.html"],
            {
                "form": form,
                "interface": interface,
                "other_interfaces": self.get_other_interfaces(interface, interfaces),
                "activation": activation,
            },
            request,
            status=200,
        )
Example #7
    def handle_sudo(self, request, redirect_to, context):
        interface = Authenticator.objects.get_interface(request.user, 'u2f')

        if interface.is_available and interface.is_enrolled:
            challenge = interface.activate(request).challenge
            if request.method == 'POST':
                if 'challenge' in request.POST:
                    challenge = json.loads(request.POST['challenge'])
                if 'response' in request.POST:
                    response = json.loads(request.POST['response'])
                    if interface.validate_response(request, challenge, response):
                        return True
            context['u2f_challenge'] = challenge

        return BaseSudoView.handle_sudo(self, request, redirect_to, context)
Example #8
    def test_compare_commits_no_start(self):
        stub_installation_token()
        responses.add(
            responses.GET,
            'https://api.github.com/repos/getsentry/example-repo/commits?sha=abcdef',
            json=json.loads(GET_LAST_COMMITS_EXAMPLE)
        )
        responses.add(
            responses.GET,
            'https://api.github.com/repos/getsentry/example-repo/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e',
            json=json.loads(GET_COMMIT_EXAMPLE)
        )
        result = self.provider.compare_commits(self.repository, None, 'abcdef')
        for commit in result:
            assert_commit_shape(commit)
Example #9
    def handle(self, request):
        user = auth.get_pending_2fa_user(request)
        if user is None or request.user.is_authenticated():
            return HttpResponseRedirect(reverse('sentry'))

        interfaces = Authenticator.objects.all_interfaces_for_user(user)

        # If for whatever reason we ended up here but the user has no 2FA
        # enabled, we just continue successfully.
        if not interfaces:
            return self.perform_signin(request, user)

        challenge = activation = None
        interface = self.negotiate_interface(request, interfaces)
        if request.method == 'GET':
            activation = interface.activate(request)
            if activation is not None and activation.type == 'challenge':
                challenge = activation.challenge
        elif 'challenge' in request.POST:
            challenge = json.loads(request.POST['challenge'])

        form = TwoFactorForm()

        # If an OTP response was supplied, try to validate it.
        otp = request.POST.get('otp')
        if otp:
            used_interface = self.validate_otp(otp, interface, interfaces)
            if used_interface is not None:
                return self.perform_signin(request, user, used_interface)
            self.fail_signin(request, user, form)

        # If both a challenge and a response exist, validate them
        if challenge:
            response = request.POST.get('response')
            if response:
                response = json.loads(response)
                if interface.validate_response(request, challenge, response):
                    return self.perform_signin(request, user, interface)
                self.fail_signin(request, user, form)

        return render_to_response(['sentry/twofactor_%s.html' %
                                   interface.interface_id,
                                   'sentry/twofactor.html'], {
            'form': form,
            'interface': interface,
            'other_interfaces': self.get_other_interfaces(interface, interfaces),
            'activation': activation,
        }, request, status=200)
Example #10
    def dispatch(self, request, helper):
        access_token = helper.fetch_state('data')['access_token']

        req = safe_urlopen('{0}?{1}&alt=json'.format(
            USER_DETAILS_ENDPOINT,
            urlencode({
                'access_token': access_token,
            })
        ))
        body = safe_urlread(req)
        data = json.loads(body)

        if not data.get('data'):
            logger.error('Invalid response: %s' % body)
            return helper.error(ERR_INVALID_RESPONSE)

        if not data.get('data').get('email'):
            logger.error('Invalid response: %s' % body)
            return helper.error(ERR_INVALID_RESPONSE)

        domain = extract_domain(data.get('data').get('email'))

        if domain in DOMAIN_BLOCKLIST:
            return helper.error(ERR_INVALID_DOMAIN % (domain,))

        if self.domain and self.domain != domain:
            return helper.error(ERR_INVALID_DOMAIN % (domain,))

        helper.bind_state('domain', domain)
        helper.bind_state('user', data.get('data'))

        return helper.next_step()
Example #11
    def request(self, method, path, user, auth=None, params=None, data=None,
                is_sudo=False):
        full_path = self.prefix + path

        resolver_match = resolve(full_path)
        callback, callback_args, callback_kwargs = resolver_match

        if data:
            # round-trip through JSON to ensure the payload only contains JSON-compatible types
            data = json.loads(json.dumps(data))

        rf = APIRequestFactory()
        mock_request = getattr(rf, method.lower())(full_path, data)
        mock_request.auth = auth
        mock_request.user = user
        mock_request.is_sudo = lambda: is_sudo

        if params:
            mock_request.GET._mutable = True
            mock_request.GET.update(params)
            mock_request.GET._mutable = False

        if data:
            mock_request.POST._mutable = True
            mock_request.POST.update(data)
            mock_request.POST._mutable = False

        response = callback(mock_request, *callback_args, **callback_kwargs)
        if 200 <= response.status_code < 400:
            return response
        raise self.ApiError(response.status_code, response.data)
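
The `json.loads(json.dumps(data))` round-trip in the helper above coerces the payload into plain JSON types, mimicking what a real client would send over the wire. A quick illustration:

import json

data = {'ids': (1, 2, 3), 42: 'answer'}
normalized = json.loads(json.dumps(data))
# Tuples become lists and non-string keys become strings,
# exactly as they would after a real HTTP round-trip.
assert normalized == {'ids': [1, 2, 3], '42': 'answer'}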
Example #12
    def refresh_identity(self, auth_identity):
        refresh_token = auth_identity.data.get("refresh_token")

        if not refresh_token:
            raise IdentityNotValid("Missing refresh token")

        data = self.get_refresh_token_params(refresh_token=refresh_token)
        req = safe_urlopen(self.get_refresh_token_url(), data=data)

        try:
            body = safe_urlread(req)
            payload = json.loads(body)
        except Exception:
            payload = {}

        error = payload.get("error", "unknown_error")
        error_description = payload.get("error_description", "no description available")

        formatted_error = "HTTP {} ({}): {}".format(req.status_code, error, error_description)

        if req.status_code == 401:
            raise IdentityNotValid(formatted_error)

        if req.status_code == 400:
            # this may not be common, but at the very least Google will return
            # an invalid grant when a user is suspended
            if error == "invalid_grant":
                raise IdentityNotValid(formatted_error)

        if req.status_code != 200:
            raise Exception(formatted_error)

        auth_identity.data.update(self.get_oauth_data(payload))
        auth_identity.update(data=auth_identity.data)
Example #13
def sourcemap_to_index(sourcemap):
    smap = json.loads(sourcemap)

    state_list = []
    key_list = []
    src_list = set()
    content = None

    if 'sourcesContent' in smap:
        content = {}
        for idx, source in enumerate(smap['sources']):
            if smap['sourcesContent'][idx]:
                content[source] = smap['sourcesContent'][idx].splitlines()
            else:
                content[source] = []

    for state in parse_sourcemap(smap):
        state_list.append(state)
        key_list.append((state.dst_line, state.dst_col))

        # Apparently it's possible to not have a src
        # specified in the vlq segments
        if state.src is not None:
            src_list.add(state.src)

    return SourceMapIndex(state_list, key_list, src_list, content)
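
The sorted `(dst_line, dst_col)` key list is what makes token lookup cheap later. A hedged sketch of how such an index might be queried; the actual `SourceMapIndex` lookup code is not shown in this example:

import bisect

def find_state(key_list, state_list, dst_line, dst_col):
    # Rightmost token starting at or before (dst_line, dst_col).
    idx = bisect.bisect_right(key_list, (dst_line, dst_col)) - 1
    return state_list[idx] if idx >= 0 else None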
Example #14
    def exchange_token(self, request, helper, code):
        # TODO: this still needs auth
        data = self.get_token_params(code=code, redirect_uri=absolute_uri(helper.get_redirect_url()))
        req = safe_urlopen(self.access_token_url, data=data)
        body = safe_urlread(req)

        return json.loads(body)
Example #15
    def test_simple_notification(self):
        responses.add(
            'POST',
            'https://alert.victorops.com/integrations/generic/20131114/alert/secret-api-key/everyone',
            body=SUCCESS
        )
        self.plugin.set_option('api_key', 'secret-api-key', self.project)
        self.plugin.set_option('routing_key', 'everyone', self.project)

        group = self.create_group(message='Hello world', culprit='foo.bar')
        event = self.create_event(group=group, message='Hello world', tags={'level': 'warning'})

        rule = Rule.objects.create(project=self.project, label='my rule')

        notification = Notification(event=event, rule=rule)

        with self.options({'system.url-prefix': 'http://example.com'}):
            self.plugin.notify(notification)

        request = responses.calls[0].request
        payload = json.loads(request.body)
        assert {
            'message_type': 'WARNING',
            'entity_id': group.id,
            'entity_display_name': 'Hello world',
            'monitoring_tool': 'sentry',
            'state_message': 'Stacktrace\n-----------\n\nStacktrace (most recent call last):\n\n  File "sentry/models/foo.py", line 29, in build_msg\n    string_max_length=self.string_max_length)\n\nMessage\n-----------\n\nHello world',
            'timestamp': int(event.datetime.strftime('%s')),
            'issue_url': 'http://example.com/baz/bar/issues/%s/' % group.id,
        } == payload
Example #16
    def test_link_issue(self):
        responses.add(
            responses.GET,
            'https://gitlab.com/api/v4/projects/getsentry%2Fsentry/issues/1',
            body='{"iid": 1, "id": "10", "title": "Hello world"}',
            match_querystring=True
        )
        responses.add(
            responses.POST,
            'https://gitlab.com/api/v4/projects/getsentry%2Fsentry/issues/1/notes',
            body='{"body": "Hello"}'
        )

        self.plugin.set_option('gitlab_url', 'https://gitlab.com', self.project)
        self.plugin.set_option('gitlab_repo', 'getsentry/sentry', self.project)
        self.plugin.set_option('gitlab_token', 'abcdefg', self.project)
        group = self.create_group(message='Hello world', culprit='foo.bar')

        request = self.request.get('/')
        request.user = self.user
        form_data = {
            'comment': 'Hello',
            'issue_id': '1',
        }

        self.login_as(self.user)

        assert self.plugin.link_issue(request, group, form_data) == {
            'title': 'Hello world',
        }
        request = responses.calls[-1].request
        payload = json.loads(request.body)
        assert payload == {
            'body': 'Hello',
        }
Example #17
    def _make_request(self):
        try:
            body = safe_urlread(
                safe_urlopen(
                    url=self._build_url(),
                    headers=self._build_headers(),
                )
            )

            response = json.loads(body)
        except Exception as e:
            logger.info(
                'select-requester.error',
                extra={
                    'sentry_app': self.sentry_app.slug,
                    'install': self.install.uuid,
                    'project': self.project and self.project.slug,
                    'uri': self.uri,
                    'error_message': e.message,
                }
            )
            response = {}

        if not self._validate_response(response):
            raise APIError()

        return self._format_response(response)
Example #18
def sourcemap_to_index(sourcemap):
    smap = json.loads(sourcemap)

    state_list = []
    key_list = []
    src_list = set()
    content = {}
    sourceRoot = smap.get('sourceRoot')

    # turn /foo/bar into /foo/bar/ so urljoin doesn't strip the last path segment
    if sourceRoot and not sourceRoot.endswith('/'):
        sourceRoot = sourceRoot + '/'

    if 'sourcesContent' in smap:
        for idx, source in enumerate(smap['sources']):
            # Ensure we handle null files that may be specified outside of
            # sourcesContent
            if smap['sourcesContent'][idx]:
                # Apply the root to the source before shoving into the index
                # so we can look it up correctly later
                source = urljoin(sourceRoot, source)
                content[source] = smap['sourcesContent'][idx].splitlines()

    for state in parse_sourcemap(smap):
        state_list.append(state)
        key_list.append((state.dst_line, state.dst_col))

        # Apparently it's possible to not have a src
        # specified in the vlq segments
        if state.src is not None:
            src_list.add(state.src)

    return SourceMapIndex(state_list, key_list, src_list, content)
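
The trailing-slash fixup at the top matters because `urljoin` drops the final path segment of a base that does not end in `/`:

from urlparse import urljoin  # urllib.parse on Python 3

assert urljoin('/foo/bar', 'app.js') == '/foo/app.js'       # last segment lost
assert urljoin('/foo/bar/', 'app.js') == '/foo/bar/app.js'  # preserved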
Example #19
    def after_link_issue(self, mock_get_jwt):
        responses.add(
            responses.POST,
            'https://api.github.com/installations/github_external_id/access_tokens',
            json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
        )

        responses.add(
            responses.POST,
            'https://api.github.com/repos/getsentry/sentry/issues/321/comments',
            json={'body': 'hello'}
        )

        data = {'comment': 'hello'}
        external_issue = ExternalIssue.objects.create(
            organization_id=self.organization.id,
            integration_id=self.model.id,
            key='hello#321',
        )

        self.integration.after_link_issue(external_issue, data=data)

        request = responses.calls[0].request
        assert request.headers['Authorization'] == 'Bearer jwt_token_1'

        request = responses.calls[1].request
        assert request.headers['Authorization'] == 'token token_1'
        payload = json.loads(request.body)
        assert payload == {'body': 'hello'}
Example #20
    def view(self, request, group, **kwargs):
        if request.GET.get('autocomplete_query'):
            query = request.GET.get('q')
            if not query:
                return JSONResponse({'issues': []})
            repo = self.get_option('repo', group.project)
            query = 'repo:%s %s' % (repo, query)
            url = 'https://api.github.com/search/issues?%s' % (urlencode({'q': query}),)

            try:
                req = self.make_api_request(request.user, url)
                body = safe_urlread(req)
            except requests.RequestException as e:
                msg = unicode(e)
                self.handle_api_error(request, msg)
                return JSONResponse({}, status=502)

            try:
                json_resp = json.loads(body)
            except ValueError as e:
                msg = unicode(e)
                self.handle_api_error(request, msg)
                return JSONResponse({}, status=502)

            issues = [{
                'text': '(#%s) %s' % (i['number'], i['title']),
                'id': i['number']
            } for i in json_resp.get('items', [])]
            return JSONResponse({'issues': issues})

        return super(GitHubPlugin, self).view(request, group, **kwargs)
Example #21
    def get_issue_title_by_id(self, request, group, issue_id):
        url = '%s/%s' % (self.build_api_url(group, 'issues'), issue_id)
        req = self.make_api_request(request.user, url)

        body = safe_urlread(req)
        json_resp = json.loads(body)
        return json_resp['title']
Example #22
    def github_request(self, request, url, **kwargs):
        """
        Make a GitHub request on behalf of the logged in user. Return JSON
        response on success or raise forms.ValidationError on any exception
        """
        auth = self.get_auth_for_user(user=request.user)
        if auth is None:
            raise forms.ValidationError(_("You have not yet associated GitHub with your account."))

        headers = kwargs.pop("headers", None) or {}
        headers["Authorization"] = "token %s" % auth.tokens["access_token"]
        try:
            req = safe_urlopen(url, headers=headers, **kwargs)
            body = safe_urlread(req)
        except requests.RequestException as e:
            msg = unicode(e)
            raise forms.ValidationError(_("Error communicating with GitHub: %s") % (msg,))

        try:
            json_resp = json.loads(body)
        except ValueError as e:
            msg = unicode(e)
            raise forms.ValidationError(_("Error communicating with GitHub: %s") % (msg,))

        if req.status_code > 399:
            raise forms.ValidationError(json_resp["message"])

        return json_resp
Example #23
    def test_create_issue(self, mock_get_jwt):
        responses.add(
            responses.POST,
            'https://api.github.com/installations/github_external_id/access_tokens',
            json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
        )

        responses.add(
            responses.POST,
            'https://api.github.com/repos/getsentry/sentry/issues',
            json={'number': 321, 'title': 'hello', 'body': 'This is the description',
                  'html_url': 'https://github.com/getsentry/sentry/issues/231'}
        )

        form_data = {
            'repo': 'getsentry/sentry',
            'title': 'hello',
            'description': 'This is the description',
        }

        assert self.integration.create_issue(form_data) == {
            'key': 321,
            'description': 'This is the description',
            'title': 'hello',
            'url': 'https://github.com/getsentry/sentry/issues/231',
            'repo': 'getsentry/sentry',
        }
        request = responses.calls[0].request
        assert request.headers['Authorization'] == 'Bearer jwt_token_1'

        request = responses.calls[1].request
        assert request.headers['Authorization'] == 'token token_1'
        payload = json.loads(request.body)
        assert payload == {'body': 'This is the description', 'assignee': None, 'title': 'hello'}
Example #24
    def create_issue(self, request, group, form_data, **kwargs):
        # TODO: support multiple identities via a selection input in the form?
        json_data = {
            "title": form_data['title'],
            "body": form_data['description'],
            "assignee": form_data.get('assignee'),
        }

        try:
            url = self.build_api_url(group, 'issues')
            req = self.make_api_request(request.user, url, json_data=json_data)
            body = safe_urlread(req)
        except requests.RequestException as e:
            msg = unicode(e)
            raise forms.ValidationError(_('Error communicating with GitHub: %s') % (msg,))

        try:
            json_resp = json.loads(body)
        except ValueError as e:
            msg = unicode(e)
            raise forms.ValidationError(_('Error communicating with GitHub: %s') % (msg,))

        if req.status_code > 399:
            raise forms.ValidationError(json_resp['message'])

        return json_resp['number']
Example #25
    def _handle_builtin(self, request, project):
        endpoint = '/projects/{}/{}/releases/'.format(
            project.organization.slug,
            project.slug,
        )
        try:
            # Ideally the API client would support some kind of god-mode here
            # as we've already confirmed credentials and simply want to execute
            # the view code. Instead we hack around it with an ApiKey instance
            god = ApiKey(
                organization=project.organization,
                scopes=getattr(ApiKey.scopes, 'project:write'),
            )

            resp = client.post(
                endpoint,
                data=json.loads(request.body),
                auth=god,
            )
        except client.ApiError as exc:
            return HttpResponse(
                status=exc.status_code,
                content=exc.body,
                content_type='application/json',
            )
        return HttpResponse(
            status=resp.status_code,
            content=json.dumps(resp.data),
            content_type='application/json',
        )
Example #26
    def exchange_token(self, request, pipeline, code):
        # TODO: this still needs auth
        data = self.get_token_params(
            code=code,
            redirect_uri=absolute_uri(pipeline.redirect_url()),
        )
        verify_ssl = pipeline.config.get('verify_ssl', True)
        try:
            req = safe_urlopen(self.access_token_url, data=data, verify_ssl=verify_ssl)
            body = safe_urlread(req)
            if req.headers.get('Content-Type', '').startswith('application/x-www-form-urlencoded'):
                return dict(parse_qsl(body))
            return json.loads(body)
        except SSLError:
            logger.info('identity.oauth2.ssl-error', extra={
                'url': self.access_token_url,
                'verify_ssl': verify_ssl,
            })
            url = self.access_token_url
            return {
                'error': 'Could not verify SSL certificate',
                'error_description': u'Ensure that {} has a valid SSL certificate'.format(url)
            }
        except JSONDecodeError:
            logger.info('identity.oauth2.json-error', extra={
                'url': self.access_token_url,
            })
            return {
                'error': 'Could not decode a JSON Response',
                'error_description': u'We were not able to parse a JSON response, please try again.'
            }
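
OAuth providers disagree on how they encode token responses (GitHub, for instance, defaults to form encoding unless JSON is explicitly requested), which is what the `Content-Type` branch above handles. A minimal sketch of the two decode paths, with illustrative bodies:

import json
from urlparse import parse_qsl  # urllib.parse on Python 3

def decode_token_response(content_type, body):
    if content_type.startswith('application/x-www-form-urlencoded'):
        # e.g. 'access_token=abc&token_type=bearer'
        return dict(parse_qsl(body))
    # otherwise assume JSON, e.g. '{"access_token": "abc"}'
    return json.loads(body)

assert decode_token_response('application/x-www-form-urlencoded',
                             'access_token=abc')['access_token'] == 'abc'
assert decode_token_response('application/json',
                             '{"access_token": "abc"}')['access_token'] == 'abc'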
Example #27
    def test_compare_commits(self, mock_compare_commits):
        organization = self.create_organization()
        integration = Integration.objects.create(
            provider='github',
            external_id='1',
        )
        repo = Repository.objects.create(
            name='example-repo',
            provider='integrations:github',
            organization_id=organization.id,
            integration_id=integration.id,
            config={'name': 'example-repo'},
        )

        res = self.provider._format_commits(repo, json.loads(COMPARE_COMMITS_EXAMPLE)['commits'])

        assert res == [
            {
                'author_email': '*****@*****.**',
                'author_name': 'Monalisa Octocat',
                'message': 'Fix all the bugs',
                'id': '6dcb09b5b57875f334f61aebed695e2e4193db5e',
                'repository': 'example-repo'
            }
        ]

        self.provider.compare_commits(repo, 'b' * 40, 'a' * 40)

        assert mock_compare_commits.called
Example #28
    def test_valid_params(self):
        resp = self.client.post(self.path, data=json.dumps({
            'version': 'a',
        }), content_type='application/json')
        assert resp.status_code == 201, resp.content
        data = json.loads(resp.content)
        assert data['version'] == 'a'
Example #29
def sourcemap_to_index(sourcemap):
    smap = json.loads(sourcemap)

    state_list = []
    key_list = []
    src_list = set()
    content = None
    root = smap.get('sourceRoot')

    if 'sourcesContent' in smap:
        content = {}
        for idx, source in enumerate(smap['sources']):
            # Apply the root to the source before shoving into the index
            # so we can look it up correctly later
            source = urljoin(root, source)
            if smap['sourcesContent'][idx]:
                content[source] = smap['sourcesContent'][idx].splitlines()
            else:
                content[source] = []

    for state in parse_sourcemap(smap):
        state_list.append(state)
        key_list.append((state.dst_line, state.dst_col))

        # Apparently it's possible to not have a src
        # specified in the vlq segments
        if state.src is not None:
            src_list.add(state.src)

    return SourceMapIndex(state_list, key_list, src_list, content)
Example #30
def load_data(platform):
    json_path = os.path.join(DATA_ROOT, 'samples', '%s.json' % (platform.encode('utf-8'),))

    if not os.path.exists(json_path):
        return

    with open(json_path) as fp:
        data = json.loads(fp.read())

    data['platform'] = platform
    data['message'] = 'This is an example %s exception' % (
        PLATFORM_TITLES.get(platform, platform.title()),)
    data['sentry.interfaces.User'] = {
        "username": "******",
        "id": "1671",
        "email": "*****@*****.**"
    }
    data['tags'] = [
        ('foo', 'bar'),
        ('version', '1.0'),
    ]
    data['sentry.interfaces.Http'] = {
        "cookies": {},
        "url": "http://example.com/foo",
        "headers": {
            "Referer": "http://example.com",
            "User-Agent": "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36"
        },
        "env": {},
        "query_string": "",
        "data": {},
        "method": "GET"
    }

    return data
Example #31
    def process_message(self, message):
        return json.loads(message.value())
Example #32
def bulk_raw_query(snuba_param_list, referrer=None):
    headers = {}
    if referrer:
        headers["referer"] = referrer

    query_param_list = map(_prepare_query_params, snuba_param_list)

    def snuba_query(params):
        query_params, forward, reverse, thread_hub = params
        try:
            with timer("snuba_query"):
                referrer = headers.get("referer", "<unknown>")
                if SNUBA_INFO:
                    logger.info("{}.body: {}".format(referrer,
                                                     json.dumps(query_params)))
                    query_params["debug"] = True
                body = json.dumps(query_params)
                with thread_hub.start_span(
                        op="snuba",
                        description=u"query {}".format(referrer)) as span:
                    span.set_tag("referrer", referrer)
                    for param_key, param_data in six.iteritems(query_params):
                        span.set_data(param_key, param_data)
                    return (
                        _snuba_pool.urlopen("POST",
                                            "/query",
                                            body=body,
                                            headers=headers),
                        forward,
                        reverse,
                    )
        except urllib3.exceptions.HTTPError as err:
            raise SnubaError(err)

    with sentry_sdk.start_span(
            op="start_snuba_query",
            description=u"running {} snuba queries".format(
                len(snuba_param_list)),
    ) as span:
        span.set_tag("referrer", headers.get("referer", "<unknown>"))
        if len(snuba_param_list) > 1:
            query_results = list(
                _query_thread_pool.map(snuba_query, [
                    params + (Hub(Hub.current), )
                    for params in query_param_list
                ]))
        else:
            # No need to submit to the thread pool if we're just performing a
            # single query
            query_results = [
                snuba_query(query_param_list[0] + (Hub(Hub.current), ))
            ]

    results = []
    for response, _, reverse in query_results:
        try:
            body = json.loads(response.data)
            if SNUBA_INFO:
                if "sql" in body:
                    logger.info("{}.sql: {}".format(
                        headers.get("referer", "<unknown>"), body["sql"]))
                if "error" in body:
                    logger.info("{}.err: {}".format(
                        headers.get("referer", "<unknown>"), body["error"]))
        except ValueError:
            if response.status != 200:
                logger.error("snuba.query.invalid-json")
                raise SnubaError("Failed to parse snuba error response")
            raise UnexpectedResponseError(
                u"Could not decode JSON response: {}".format(response.data))

        if response.status != 200:
            if body.get("error"):
                error = body["error"]
                if response.status == 429:
                    raise RateLimitExceeded(error["message"])
                elif error["type"] == "schema":
                    raise SchemaValidationError(error["message"])
                elif error["type"] == "clickhouse":
                    raise clickhouse_error_codes_map.get(
                        error["code"], QueryExecutionError)(error["message"])
                else:
                    raise SnubaError(error["message"])
            else:
                raise SnubaError(u"HTTP {}".format(response.status))

        # Forward and reverse translation maps from model ids to snuba keys, per column
        body["data"] = [reverse(d) for d in body["data"]]
        results.append(body)

    return results
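
Note the single-query fast path above: fanning out to the thread pool is only worthwhile when there is more than one query. The pattern in isolation, with a stand-in pool and query function:

from multiprocessing.dummy import Pool  # thread-backed Pool

def run_all(pool, query_fn, param_list):
    # Only dispatch to worker threads when there is real parallelism to gain.
    if len(param_list) > 1:
        return list(pool.map(query_fn, param_list))
    return [query_fn(param_list[0])]

assert run_all(Pool(4), lambda x: x * 2, [1, 2, 3]) == [2, 4, 6]
assert run_all(Pool(4), lambda x: x * 2, [5]) == [10]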
Example #33
    def post(self, request, project, **kwargs):
        # Minidump request payloads do not have the same structure as
        # usual events from other SDKs. Most notably, the event needs
        # to be transferred in the `sentry` form field. All other form
        # fields are assumed "extra" information. The only exception
        # to this is `upload_file_minidump`, which contains the minidump.

        if any(key.startswith('sentry[') for key in request.POST):
            # First, try to parse the nested form syntax `sentry[key][key]`
            # This is required for the Breakpad client library, which only
            # supports string values of up to 64 characters.
            extra = parser.parse(request.POST.urlencode())
            data = extra.pop('sentry', {})
        else:
            # Custom clients can submit longer payloads and should JSON
            # encode event data into the optional `sentry` field.
            extra = request.POST
            json_data = extra.pop('sentry', None)
            data = json.loads(json_data[0]) if json_data else {}

        # Merge additional form fields from the request with `extra`
        # data from the event payload and set defaults for processing.
        extra.update(data.get('extra', {}))
        data['extra'] = extra

        # Assign our own UUID so we can track this minidump. We cannot trust the
        # uploaded filename, and if reading the minidump fails there is no way
        # we can ever retrieve the original UUID from the minidump.
        event_id = data.get('event_id') or uuid.uuid4().hex
        data['event_id'] = event_id

        # At this point, we only extract the bare minimum information
        # needed to continue processing. This requires processing the
        # minidump without symbols and CFI to obtain an initial stack
        # trace (most likely via stack scanning). If all validations
        # pass, the event will be inserted into the database.
        try:
            minidump = request.FILES['upload_file_minidump']
        except KeyError:
            raise APIError('Missing minidump upload')

        # Breakpad on Linux sometimes stores the entire HTTP request body as
        # the dump file instead of just the minidump. The Electron SDK, for
        # example, then uploads a multipart form-data body inside the minidump
        # file. It needs to be re-parsed to extract the actual minidump before
        # continuing.
        minidump.seek(0)
        if minidump.read(2) == b'--':
            # We have already read the first two bytes ('--'); the rest of
            # the line is the form boundary.
            boundary = minidump.readline().rstrip()
            minidump.seek(0)

            # Next, we have to fake an HTTP request by specifying the form
            # boundary and the content length, otherwise Django will not try
            # to parse our form body. We also need to supply new upload
            # handlers since they cannot be reused from the current request.
            meta = {
                'CONTENT_TYPE': b'multipart/form-data; boundary=%s' % boundary,
                'CONTENT_LENGTH': minidump.size,
            }
            handlers = [
                uploadhandler.load_handler(handler, request)
                for handler in settings.FILE_UPLOAD_HANDLERS
            ]

            _, files = MultiPartParser(meta, minidump, handlers).parse()
            try:
                minidump = files['upload_file_minidump']
            except KeyError:
                raise APIError('Missing minidump upload')

        if minidump.size == 0:
            raise APIError('Empty minidump upload received')

        if settings.SENTRY_MINIDUMP_CACHE:
            if not os.path.exists(settings.SENTRY_MINIDUMP_PATH):
                os.mkdir(settings.SENTRY_MINIDUMP_PATH, 0o744)

            with open('%s/%s.dmp' % (settings.SENTRY_MINIDUMP_PATH, event_id), 'wb') as out:
                for chunk in minidump.chunks():
                    out.write(chunk)

        # Always store the minidump in attachments so we can access it during
        # processing, regardless of the event-attachments feature. This will
        # allow us to stack walk again with CFI once symbols are loaded.
        attachments = []
        minidump.seek(0)
        attachments.append(CachedAttachment.from_upload(minidump, type=MINIDUMP_ATTACHMENT_TYPE))

        # Append all other files as generic attachments. We can skip this if the
        # feature is disabled since they won't be saved.
        if features.has('organizations:event-attachments',
                        project.organization, actor=request.user):
            for name, file in six.iteritems(request.FILES):
                if name == 'upload_file_minidump':
                    continue
                # Known attachment: msgpack event
                if name == "__sentry-event":
                    merge_attached_event(file, data)
                    continue
                if name == "__sentry-breadcrumb1" or name == "__sentry-breadcrumb2":
                    merge_attached_breadcrumbs(file, data)
                    continue

                # Add any other file as attachment
                attachments.append(CachedAttachment.from_upload(file))

        try:
            state = process_minidump(minidump)
            merge_process_state_event(data, state)
        except ProcessMinidumpError as e:
            minidumps_logger.exception(e)
            raise APIError(e.message.split('\n', 1)[0])

        event_id = self.process(
            request,
            attachments=attachments,
            data=data,
            project=project,
            **kwargs)

        # Return the formatted UUID of the generated event. This is
        # expected by the Electron http uploader on Linux and doesn't
        # break the default Breakpad client library.
        return HttpResponse(
            six.text_type(uuid.UUID(event_id)),
            content_type='text/plain'
        )
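
The Breakpad workaround above hinges on the fact that a multipart body always begins with `--` followed by the form boundary. A hedged sketch of just the detection step:

from io import BytesIO

def extract_boundary(fileobj):
    # A nested multipart body starts with '--<boundary>\r\n'.
    fileobj.seek(0)
    if fileobj.read(2) != b'--':
        return None  # looks like a plain minidump
    boundary = fileobj.readline().rstrip()
    fileobj.seek(0)
    return boundary

assert extract_boundary(BytesIO(b'--xYzZY\r\ncontent...')) == b'xYzZY'
assert extract_boundary(BytesIO(b'MDMP...')) is None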
Example #34
    def __decode(self, value):
        return Report(*json.loads(zlib.decompress(value)))
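
The matching encode step is symmetric. A minimal round-trip sketch, assuming `Report` is a namedtuple of JSON-serializable fields (the field names here are illustrative):

import json
import zlib
from collections import namedtuple

Report = namedtuple('Report', ['project_id', 'count'])

def encode(report):
    # A namedtuple serializes as a plain JSON list.
    return zlib.compress(json.dumps(report).encode('utf-8'))

def decode(value):
    return Report(*json.loads(zlib.decompress(value)))

assert decode(encode(Report(1, 42))) == Report(1, 42)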
Example #35
    def test_bot_message_im(self):
        resp = self.post_webhook(event_data=json.loads(MESSAGE_IM_BOT_EVENT))
        assert resp.status_code == 200, resp.content
Example #36
class GitHubAppsProviderTest(PluginTestCase):
    @fixture
    def provider(self):
        return GitHubAppsRepositoryProvider("github_apps")

    @patch.object(
        GitHubAppsClient,
        "get_repositories",
        return_value=json.loads(INTSTALLATION_REPOSITORIES_API_RESPONSE),
    )
    @patch.object(GitHubClient,
                  "get_installations",
                  return_value=json.loads(LIST_INSTALLATION_API_RESPONSE))
    def test_link_auth(self, *args):
        user = self.create_user()
        organization = self.create_organization()
        UserSocialAuth.objects.create(user=user,
                                      provider="github_apps",
                                      extra_data={"access_token": "abcdefg"})

        integration = Integration.objects.create(provider="github_apps",
                                                 external_id="1")

        self.provider.link_auth(user, organization,
                                {"integration_id": integration.id})

        assert OrganizationIntegration.objects.filter(
            organization=organization, integration=integration).exists()

    def test_delete_repository(self):
        user = self.create_user()
        organization = self.create_organization()
        integration = Integration.objects.create(provider="github_apps",
                                                 external_id="1")
        repo = Repository.objects.create(
            name="example-repo",
            provider="github_apps",
            organization_id=organization.id,
            integration_id=integration.id,
        )

        # just check that it doesn't throw / try to delete a webhook
        assert self.provider.delete_repository(repo=repo, actor=user) is None

    @patch.object(GitHubAppsClient, "get_last_commits", return_value=[])
    def test_compare_commits_no_start(self, mock_get_last_commits):
        organization = self.create_organization()
        integration = Integration.objects.create(provider="github_apps",
                                                 external_id="1")
        repo = Repository.objects.create(
            name="example-repo",
            provider="github_apps",
            organization_id=organization.id,
            integration_id=integration.id,
            config={"name": "example-repo"},
        )

        self.provider.compare_commits(repo, None, "a" * 40)

        assert mock_get_last_commits.called

    @patch.object(GitHubAppsClient,
                  "compare_commits",
                  return_value={"commits": []})
    def test_compare_commits(self, mock_compare_commits):
        organization = self.create_organization()
        integration = Integration.objects.create(provider="github_apps",
                                                 external_id="1")
        repo = Repository.objects.create(
            name="example-repo",
            provider="github_apps",
            organization_id=organization.id,
            integration_id=integration.id,
            config={"name": "example-repo"},
        )

        self.provider.compare_commits(repo, "b" * 40, "a" * 40)

        assert mock_compare_commits.called
Example #37
def process_event(message, projects):
    payload = message["payload"]
    start_time = float(message["start_time"])
    event_id = message["event_id"]
    project_id = int(message["project_id"])
    remote_addr = message.get("remote_addr")
    attachments = message.get("attachments") or ()

    # check that we haven't already processed this event (a previous instance of the forwarder
    # died before it could commit the event queue offset)
    deduplication_key = "ev:{}:{}".format(project_id, event_id)
    if cache.get(deduplication_key) is not None:
        logger.warning(
            "pre-process-forwarder detected a duplicated event"
            " with id:%s for project:%s.",
            event_id,
            project_id,
        )
        return  # message already processed; do not reprocess

    try:
        project = projects[project_id]
    except KeyError:
        logger.error("Project for ingested event does not exist: %s",
                     project_id)
        return

    # Parse the JSON payload. This is required to compute the cache key and
    # call process_event. The payload will be put into Kafka raw, to avoid
    # serializing it again.
    # XXX: Do not use CanonicalKeyDict here. This may break preprocess_event
    # which assumes that data passed in is a raw dictionary.
    data = json.loads(payload)

    cache_key = cache_key_for_event(data)
    default_cache.set(cache_key, data, CACHE_TIMEOUT)

    if attachments:
        attachment_objects = [
            CachedAttachment(type=attachment.pop("attachment_type"),
                             **attachment) for attachment in attachments
        ]

        attachment_cache.set(cache_key,
                             attachments=attachment_objects,
                             timeout=CACHE_TIMEOUT)

    # Preprocess this event, which spawns either process_event or
    # save_event. Pass data explicitly to avoid fetching it again from the
    # cache.
    preprocess_event(cache_key=cache_key,
                     data=data,
                     start_time=start_time,
                     event_id=event_id,
                     project=project)

    # remember for 1 hour that we saved this event (deduplication protection)
    cache.set(deduplication_key, "", CACHE_TIMEOUT)

    # emit event_accepted once everything is done
    event_accepted.send_robust(ip=remote_addr,
                               data=data,
                               project=project,
                               sender=process_event)
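
The deduplication guard is a cache-as-set pattern. A minimal sketch with an in-memory dict standing in for the shared cache; note that the real code above only sets the key after the event has been fully processed:

_cache = {}  # stand-in for the shared cache backend

def seen_before(project_id, event_id):
    key = "ev:{}:{}".format(project_id, event_id)
    if key in _cache:
        # A previous forwarder instance already handled this event.
        return True
    _cache[key] = ""
    return False

assert seen_before(1, "abc") is False
assert seen_before(1, "abc") is True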
Example #38
    def request_callback(request):
        payload = json.loads(request.body)
        status_code = 400 if payload.get("refs") else 200
        return (status_code, {}, json.dumps({}))
Example #39
def load_data(platform, default=None):
    # NOTE: Before editing this data, make sure you understand the context
    # in which it's being used. It is NOT only used for local development and
    # has production consequences.
    #   * bin/load-mocks to generate fake data for local testing
    #   * When a new project is created, a fake event is generated as a "starter"
    #     event so it's not an empty project.
    #   * When a user clicks Test Configuration from notification plugin settings page,
    #     a fake event is generated to go through the pipeline.

    # now = datetime.now()

    data = None
    for platform in (platform, default):
        if platform is None:
            continue

        json_path = os.path.join(DATA_ROOT, 'samples',
                                 '%s.json' % (platform.encode('utf-8'), ))

        if not os.path.exists(json_path):
            continue

        with open(json_path) as fp:
            data = json.loads(fp.read())
            break

    if data is None:
        return

    if platform == 'csp':
        return data

    data['platform'] = platform
    data['message'] = 'This is an example %s exception' % (platform, )
    data['sentry.interfaces.User'] = {
        "username": "******",
        "id": "1671",
        "email": "*****@*****.**"
    }
    data['extra'] = {
        'session': {
            'foo': 'bar',
        },
        'results': [1, 2, 3, 4, 5],
        'emptyList': [],
        'emptyMap': {},
        'length': 10837790,
        'unauthorized': False,
        'url': 'http://example.org/foo/bar/',
    }
    data['modules'] = {
        'my.package': '1.0.0',
    }
    data['sentry.interfaces.Http'] = {
        "cookies": 'foo=bar;biz=baz',
        "url": "http://example.com/foo",
        "headers": {
            "Referer":
            "http://example.com",
            "Content-Type":
            "application/json",
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36"
        },
        "env": {
            'ENV': 'prod',
        },
        "query_string": "foo=bar",
        "data": '{"hello": "world"}',
        "method": "GET"
    }

    # We can't send Breadcrumb data as a part of the sample event.
    # This gets used for all new projects as the "starter" event.
    #
    # data['sentry.interfaces.Breadcrumbs'] = {
    #     "values": [
    #         {
    #             "type": "navigation",
    #             "dt": 8200,
    #             "timestamp": milliseconds_ago(now, 5200),
    #             "data": {
    #                 "to": "http://example.com/dashboard/",
    #                 "from": "http://example.com/login/"
    #             }
    #         },
    #         {
    #             "type": "message",
    #             "dt": 5000,
    #             "timestamp": milliseconds_ago(now, 4000),
    #             "data": {
    #                 "message": "This is a message breadcrumb",
    #                 "level": "info"
    #             }
    #         },
    #         {
    #             "type": "message",
    #             "dt": 4000,
    #             "timestamp": milliseconds_ago(now, 3300),
    #             "data": {
    #                 "message": "This is a warning message",
    #                 "level": "warning"
    #             }
    #         },
    #         {
    #             "type": "message",
    #             "dt": 3500,
    #             "timestamp": milliseconds_ago(now, 2700),
    #             "data": {
    #                 "message": "This is an error message",
    #                 "level": "error"
    #             }
    #         },
    #         {
    #             "type": "http_request",
    #             "dt": 3000,
    #             "timestamp": milliseconds_ago(now, 1300),
    #             "data": {
    #                 "url": "http://example.com/foo",
    #                 "statusCode": 200,
    #                 "method": "POST",
    #                 "headers": {
    #                     "Referer": "http://example.com",
    #                     "Content-Type": "application/json"
    #                 }
    #             }
    #         },
    #         {
    #             "type": "ui_event",
    #             "dt": 1500,
    #             "timestamp": milliseconds_ago(now, 1000),
    #             "data": {
    #                 "type": "click",
    #                 "target": "<button name=\"submit\" class=\"btn btn-small\"/>"
    #             }
    #         }
    #     ]
    # }

    return data
Example #40
    def process_refresh_token_response(cls, response):
        return json.loads(response)
Example #41
def raw_query(start,
              end,
              groupby=None,
              conditions=None,
              filter_keys=None,
              aggregations=None,
              rollup=None,
              arrayjoin=None,
              limit=None,
              offset=None,
              orderby=None,
              having=None,
              referrer=None,
              is_grouprelease=False,
              selected_columns=None,
              totals=None):
    """
    Sends a query to snuba.

    `conditions`: A list of (column, operator, literal) conditions to be passed
    to the query. Conditions that we know will not have to be translated should
    be passed this way (eg tag[foo] = bar).

    `filter_keys`: A dictionary of {col: [key, ...]} that will be converted
    into "col IN (key, ...)" conditions. These are used to restrict the query to
    known sets of project/issue/environment/release etc. Appropriate
    translations (eg. from environment model ID to environment name) are
    performed on the query, and the inverse translation performed on the
    result. The project_id(s) to restrict the query to will also be
    automatically inferred from these keys.

    `aggregations`: a list of (aggregation_function, column, alias) tuples to be
    passed to the query.
    """

    # convert to naive UTC datetimes, as Snuba only deals in UTC
    # and this avoids offset-naive and offset-aware issues
    start = naiveify_datetime(start)
    end = naiveify_datetime(end)

    groupby = groupby or []
    conditions = conditions or []
    having = having or []
    aggregations = aggregations or []
    filter_keys = filter_keys or {}
    selected_columns = selected_columns or []

    with timer('get_snuba_map'):
        forward, reverse = get_snuba_translators(
            filter_keys, is_grouprelease=is_grouprelease)

    if 'project_id' in filter_keys:
        # If we are given a set of project ids, use those directly.
        project_ids = filter_keys['project_id']
    elif filter_keys:
        # Otherwise infer the project_ids from any related models
        with timer('get_related_project_ids'):
            ids = [
                get_related_project_ids(k, filter_keys[k]) for k in filter_keys
            ]
            project_ids = list(set.union(*map(set, ids)))
    else:
        project_ids = []

    for col, keys in six.iteritems(forward(filter_keys.copy())):
        if keys:
            if len(keys) == 1 and keys[0] is None:
                conditions.append((col, 'IS NULL', None))
            else:
                conditions.append((col, 'IN', keys))

    if not project_ids:
        raise SnubaError(
            "No project_id filter, or none could be inferred from other filters."
        )

    # any project will do, as they should all be from the same organization
    project = Project.objects.get(pk=project_ids[0])
    retention = quotas.get_event_retention(
        organization=Organization(project.organization_id))
    if retention:
        start = max(start, datetime.utcnow() - timedelta(days=retention))
        if start > end:
            raise QueryOutsideRetentionError

    # If the grouping, aggregation, or any of the conditions reference `issue`
    # we need to fetch the issue definitions (issue -> fingerprint hashes)
    aggregate_cols = [a[1] for a in aggregations]
    condition_cols = all_referenced_columns(conditions)
    all_cols = groupby + aggregate_cols + condition_cols + selected_columns
    get_issues = 'issue' in all_cols

    with timer('get_project_issues'):
        issues = get_project_issues(
            project_ids, filter_keys.get('issue')) if get_issues else None

    start, end = shrink_time_window(issues, start, end)

    # if `shrink_time_window` pushed `start` after `end` it means the user queried
    # a Group for T1 to T2 when the group was only active for T3 to T4, so the query
    # wouldn't return any results anyway
    if start > end:
        raise QueryOutsideGroupActivityError

    request = {
        k: v
        for k, v in six.iteritems({
            'from_date': start.isoformat(),
            'to_date': end.isoformat(),
            'conditions': conditions,
            'having': having,
            'groupby': groupby,
            'totals': totals,
            'project': project_ids,
            'aggregations': aggregations,
            'granularity': rollup,
            'issues': issues,
            'arrayjoin': arrayjoin,
            'limit': limit,
            'offset': offset,
            'orderby': orderby,
            'selected_columns': selected_columns,
        }) if v is not None
    }

    headers = {}
    if referrer:
        headers['referer'] = referrer

    try:
        with timer('snuba_query'):
            response = _snuba_pool.urlopen('POST',
                                           '/query',
                                           body=json.dumps(request),
                                           headers=headers)
    except urllib3.exceptions.HTTPError as err:
        raise SnubaError(err)

    try:
        body = json.loads(response.data)
    except ValueError:
        raise SnubaError(u"Could not decode JSON response: {}".format(
            response.data))

    if response.status != 200:
        if body.get('error'):
            raise SnubaError(body['error'])
        else:
            raise SnubaError(u'HTTP {}'.format(response.status))

    # Translate the result rows back from snuba keys to model ids, per column
    body['data'] = [reverse(d) for d in body['data']]
    return body
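As a hedged usage sketch (not from the original source): counting events per issue over the last day might look like the following. The aggregation tuple format matches the docstring above, but the specific column names and the referrer string are assumptions.

# Usage sketch for raw_query; column names and referrer are hypothetical.
from datetime import datetime, timedelta

end = datetime.utcnow()
start = end - timedelta(days=1)
result = raw_query(
    start,
    end,
    groupby=['issue'],
    filter_keys={'project_id': [1]},
    aggregations=[('count()', '', 'times_seen')],
    orderby='-times_seen',
    limit=10,
    referrer='docs.example',
)
for row in result['data']:
    print(row['issue'], row['times_seen'])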
Example #42
0
    def __decode(self, value):
        if value is None:
            return None

        return Report(*json.loads(zlib.decompress(value)))
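A matching encoder is not shown in this listing; under the assumption that Report is a tuple-like object (it is splatted into the constructor above), a plausible inverse would be:

    def __encode(self, report):
        # Hypothetical counterpart to __decode: JSON-encode the tuple-like
        # Report, then zlib-compress the UTF-8 bytes.
        if report is None:
            return None
        return zlib.compress(json.dumps(list(report)).encode("utf-8"))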
Example #43
0
def test_produce_step() -> None:
    topic = Topic("snuba-metrics")
    partition = Partition(topic, 0)

    clock = Clock()
    broker_storage: MemoryMessageStorage[KafkaPayload] = MemoryMessageStorage()
    broker: Broker[KafkaPayload] = Broker(broker_storage, clock)
    broker.create_topic(topic, partitions=1)
    producer = broker.get_producer()

    commit = Mock()

    produce_step = ProduceStep(commit_function=commit, producer=producer)

    message_payloads = [counter_payload, distribution_payload, set_payload]
    message_batch = [
        Message(
            Partition(Topic("topic"), 0),
            i + 1,
            KafkaPayload(
                None,
                json.dumps(__translated_payload(payload)).encode("utf-8"),
                []),
            datetime.now(),
        ) for i, payload in enumerate(message_payloads)
    ]
    # the outer message uses the last message's partition, offset, and timestamp
    last = message_batch[-1]
    outer_message = Message(last.partition, last.offset, message_batch,
                            last.timestamp)

    # 1. Submit the message (that would have been generated from process_messages)
    produce_step.submit(outer_message=outer_message)

    # 2. Check that submit created the same number of futures as
    #    messages in the outer_message (3 in this test). Also check
    #    that the produced message payloads are as expected.
    assert len(produce_step._ProduceStep__futures) == 3

    first_message = broker_storage.consume(partition, 0)
    assert first_message is not None

    second_message = broker_storage.consume(partition, 1)
    assert second_message is not None

    third_message = broker_storage.consume(partition, 2)
    assert third_message is not None

    assert broker_storage.consume(partition, 3) is None

    produced_messages = [
        json.loads(msg.payload.value.decode("utf-8"), use_rapid_json=True)
        for msg in [first_message, second_message, third_message]
    ]
    expected_produced_messages = []
    for payload in message_payloads:
        translated = __translated_payload(payload)
        tags: Mapping[str, int] = {
            str(k): v
            for k, v in translated["tags"].items()
        }
        translated.update(**{"tags": tags})
        expected_produced_messages.append(translated)

    assert produced_messages == expected_produced_messages

    # 3. Call poll method, and check that doing so checked that
    #    futures were ready and successful and therefore messages
    #    were committed.
    produce_step.poll()
    expected_commit_calls = [
        call({message.partition: Position(message.offset, message.timestamp)})
        for message in message_batch
    ]
    assert commit.call_args_list == expected_commit_calls

    produce_step.close()
    produce_step.join()
Example #44
0
    def test_limit_as_2_with_paging(self):
        self.setup_project_and_rules()

        # Test Limit as 2, no cursor:
        with self.feature(
            ["organizations:incidents", "organizations:performance-view"]):
            request_data = {"per_page": "2", "project": self.project_ids}
            response = self.client.get(path=self.combined_rules_url,
                                       data=request_data,
                                       content_type="application/json")
        assert response.status_code == 200

        result = json.loads(response.content)
        assert len(result) == 2
        self.assert_alert_rule_serialized(self.yet_another_alert_rule,
                                          result[0],
                                          skip_dates=True)
        assert result[1]["id"] == str(self.issue_rule.id)
        assert result[1]["type"] == "rule"

        links = requests.utils.parse_header_links(
            response.get("link").rstrip(">").replace(">,<", ",<"))
        next_cursor = links[1]["cursor"]
        # Test Limit 2, next page of previous request:
        with self.feature(
            ["organizations:incidents", "organizations:performance-view"]):
            request_data = {
                "cursor": next_cursor,
                "per_page": "2",
                "project": self.project_ids
            }
            response = self.client.get(path=self.combined_rules_url,
                                       data=request_data,
                                       content_type="application/json")
        assert response.status_code == 200

        result = json.loads(response.content)
        assert len(result) == 2
        self.assert_alert_rule_serialized(self.other_alert_rule,
                                          result[0],
                                          skip_dates=True)
        self.assert_alert_rule_serialized(self.alert_rule,
                                          result[1],
                                          skip_dates=True)

        links = requests.utils.parse_header_links(
            response.get("link").rstrip(">").replace(">,<", ",<"))
        next_cursor = links[1]["cursor"]

        # Test Limit 2, next page of previous request - should get no results since there are only 4 total:
        with self.feature(
            ["organizations:incidents", "organizations:performance-view"]):
            request_data = {
                "cursor": next_cursor,
                "per_page": "2",
                "project": self.project_ids
            }
            response = self.client.get(path=self.combined_rules_url,
                                       data=request_data,
                                       content_type="application/json")
        assert response.status_code == 200

        result = json.loads(response.content)
        assert len(result) == 0
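The cursor extraction above leans on requests' Link-header parser; the rstrip/replace calls merely normalize the header string the test client returns. Roughly, a header like the one below (the exact value is an assumption) parses into a list of dicts, with links[1] being the rel="next" entry that carries the cursor:

# Illustrative only; the real header value may differ.
import requests

link_header = (
    '<http://testserver/path/?cursor=100:0:1>; rel="previous"; cursor="100:0:1", '
    '<http://testserver/path/?cursor=100:1:0>; rel="next"; cursor="100:1:0"'
)
links = requests.utils.parse_header_links(link_header)
assert links[1]["rel"] == "next"
assert links[1]["cursor"] == "100:1:0"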
Example #45
0
    def create_full_event(group, event_id='a', **kwargs):
        payload = """
            {
                "event_id": "f5dd88e612bc406ba89dfebd09120769",
                "project": 11276,
                "release": "e1b5d1900526feaf20fe2bc9cad83d392136030a",
                "platform": "javascript",
                "culprit": "app/components/events/eventEntries in map",
                "logentry": {"formatted": "TypeError: Cannot read property '1' of null"},
                "tags": [
                    ["environment", "prod"],
                    ["sentry_version", "e1b5d1900526feaf20fe2bc9cad83d392136030a"],
                    ["level", "error"],
                    ["logger", "javascript"],
                    ["sentry:release", "e1b5d1900526feaf20fe2bc9cad83d392136030a"],
                    ["browser", "Chrome 48.0"],
                    ["device", "Other"],
                    ["os", "Windows 10"],
                    ["url", "https://sentry.io/katon-direct/localhost/issues/112734598/"],
                    ["sentry:user", "id:41656"]
                ],
                "errors": [{
                    "url": "<anonymous>",
                    "type": "js_no_source"
                }],
                "extra": {
                    "session:duration": 40364
                },
                "exception": {
                    "exc_omitted": null,
                    "values": [{
                        "stacktrace": {
                            "frames": [{
                                "function": "batchedUpdates",
                                "abs_path": "webpack:////usr/src/getsentry/src/sentry/~/react/lib/ReactUpdates.js",
                                "pre_context": ["  // verify that that's the case. (This is called by each top-level update", "  // function, like setProps, setState, forceUpdate, etc.; creation and", "  // destruction of top-level components is guarded in ReactMount.)", "", "  if (!batchingStrategy.isBatchingUpdates) {"],
                                "post_context": ["    return;", "  }", "", "  dirtyComponents.push(component);", "}"],
                                "filename": "~/react/lib/ReactUpdates.js",
                                "module": "react/lib/ReactUpdates",
                                "colno": 0,
                                "in_app": false,
                                "data": {
                                    "orig_filename": "/_static/29e365f8b0d923bc123e8afa38d890c3/sentry/dist/vendor.js",
                                    "orig_abs_path": "https://media.sentry.io/_static/29e365f8b0d923bc123e8afa38d890c3/sentry/dist/vendor.js",
                                    "sourcemap": "https://media.sentry.io/_static/29e365f8b0d923bc123e8afa38d890c3/sentry/dist/vendor.js.map",
                                    "orig_lineno": 37,
                                    "orig_function": "Object.s [as enqueueUpdate]",
                                    "orig_colno": 16101
                                },
                                "context_line": "    batchingStrategy.batchedUpdates(enqueueUpdate, component);",
                                "lineno": 176
                            }],
                            "frames_omitted": null
                        },
                        "type": "TypeError",
                        "value": "Cannot read property '1' of null",
                        "module": null
                    }]
                },
                "request": {
                    "url": "https://sentry.io/katon-direct/localhost/issues/112734598/",
                    "headers": [
                        ["Referer", "https://sentry.io/welcome/"],
                        ["User-Agent", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.109 Safari/537.36"]
                    ]
                },
                "user": {
                    "ip_address": "0.0.0.0",
                    "id": "41656",
                    "email": "*****@*****.**"
                },
                "version": "7",
                "breadcrumbs": {
                    "values": [
                        {
                            "category": "xhr",
                            "timestamp": 1496395011.63,
                            "type": "http",
                            "data": {
                                "url": "/api/path/here",
                                "status_code": "500",
                                "method": "POST"
                            }
                        }
                    ]
                }
            }"""

        event = Factories.create_event(
            group=group,
            event_id=event_id,
            platform='javascript',
            data=json.loads(payload),

            # This payload already went through sourcemap
            # processing, normalizing it would remove
            # frame.data (orig_filename, etc)
            normalize=False)
        return event
Example #46
0
def assert_response_json(response, data):
    """
    Normalizes unicode strings by encoding/decoding expected output
    """
    assert json.loads(response.content) == json.loads(json.dumps(data))
Example #47
0
def load_data(platform, default=None, sample_name=None):
    # NOTE: Before editing this data, make sure you understand the context
    # in which it's being used. It is NOT only used for local development and
    # has production consequences.
    #   * bin/load-mocks to generate fake data for local testing
    #   * When a new project is created, a fake event is generated as a "starter"
    #     event so it's not an empty project.
    #   * When a user clicks Test Configuration from notification plugin settings page,
    #     a fake event is generated to go through the pipeline.
    data = None
    language = None
    platform_data = INTEGRATION_ID_TO_PLATFORM_DATA.get(platform)

    if platform_data is not None and platform_data["type"] != "language":
        language = platform_data["language"]

    for platform in (platform, language, default):
        if not platform:
            continue

        json_path = os.path.join(DATA_ROOT, "samples",
                                 "%s.json" % (platform.encode("utf-8"), ))
        if not os.path.exists(json_path):
            continue

        if not sample_name:
            try:
                sample_name = INTEGRATION_ID_TO_PLATFORM_DATA[platform]["name"]
            except KeyError:
                pass

        with open(json_path) as fp:
            data = json.loads(fp.read())
            break

    if data is None:
        return

    data = CanonicalKeyDict(data)
    if platform in ("csp", "hkpk", "expectct", "expectstaple"):
        return data

    # Transaction events need timestamp data set to something current.
    if platform == "transaction":
        now = timezone.now()
        now_time = to_timestamp(now)
        start_time = to_timestamp(now - timedelta(seconds=2))
        data.setdefault("timestamp", now_time)
        data.setdefault("start_timestamp", start_time)
        for span in data["spans"]:
            span.setdefault("timestamp", now_time)
            span.setdefault("start_timestamp", start_time)

    data["platform"] = platform
    # XXX: Message is a legacy alias for logentry. Do not overwrite if set.
    if "message" not in data:
        data["message"] = "This is an example %s exception" % (sample_name
                                                               or platform, )
    data.setdefault(
        "user",
        generate_user(ip_address="127.0.0.1",
                      username="******",
                      id=1,
                      email="*****@*****.**"),
    )
    data.setdefault(
        "extra",
        {
            "session": {
                "foo": "bar"
            },
            "results": [1, 2, 3, 4, 5],
            "emptyList": [],
            "emptyMap": {},
            "length": 10837790,
            "unauthorized": False,
            "url": "http://example.org/foo/bar/",
        },
    )
    data.setdefault("modules", {"my.package": "1.0.0"})
    data.setdefault(
        "request",
        {
            "cookies": "foo=bar;biz=baz",
            "url": "http://example.com/foo",
            "headers": {
                "Referer":
                "http://example.com",
                "Content-Type":
                "application/json",
                "User-Agent":
                "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36",
            },
            "env": {
                "ENV": "prod"
            },
            "query_string": "foo=bar",
            "data": '{"hello": "world"}',
            "method": "GET",
        },
    )

    return data
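As used by the callers listed in the NOTE above, a typical invocation is simply (the platform value is assumed):

# e.g. seeding a starter event for a new javascript project:
data = load_data("javascript", default="javascript")
if data is not None:
    assert data["platform"] == "javascript"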
Example #48
0
def load_data(platform, default=None, sample_name=None):
    # NOTE: Before editing this data, make sure you understand the context
    # in which it's being used. It is NOT only used for local development and
    # has production consequences.
    #   * bin/load-mocks to generate fake data for local testing
    #   * When a new project is created, a fake event is generated as a "starter"
    #     event so it's not an empty project.
    #   * When a user clicks Test Configuration from notification plugin settings page,
    #     a fake event is generated to go through the pipeline.
    data = None
    language = None
    platform_data = INTEGRATION_ID_TO_PLATFORM_DATA.get(platform)

    if platform_data is not None and platform_data['type'] != 'language':
        language = platform_data['language']

    for platform in (platform, language, default):
        if not platform:
            continue

        json_path = os.path.join(DATA_ROOT, 'samples', '%s.json' % (platform.encode('utf-8'), ))
        if not os.path.exists(json_path):
            continue

        if not sample_name:
            try:
                sample_name = INTEGRATION_ID_TO_PLATFORM_DATA[platform]['name']
            except KeyError:
                pass

        with open(json_path) as fp:
            data = json.loads(fp.read())
            break

    if data is None:
        return

    data = CanonicalKeyDict(data)
    if platform in ('csp', 'hkpk', 'expectct', 'expectstaple'):
        return data

    data['platform'] = platform
    data['message'] = 'This is an example %s exception' % (sample_name or platform, )
    data['user'] = generate_user(
        ip_address='127.0.0.1',
        username='******',
        id=1,
        email='*****@*****.**',
    )
    data['extra'] = {
        'session': {
            'foo': 'bar',
        },
        'results': [1, 2, 3, 4, 5],
        'emptyList': [],
        'emptyMap': {},
        'length': 10837790,
        'unauthorized': False,
        'url': 'http://example.org/foo/bar/',
    }
    data['modules'] = {
        'my.package': '1.0.0',
    }
    data['request'] = {
        "cookies": 'foo=bar;biz=baz',
        "url": "http://example.com/foo",
        "headers": {
            "Referer":
            "http://example.com",
            "Content-Type":
            "application/json",
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36"
        },
        "env": {
            'ENV': 'prod',
        },
        "query_string": "foo=bar",
        "data": '{"hello": "world"}',
        "method": "GET"
    }

    return data
Example #49
0
    def test_basic_resolving(self):
        url = reverse(
            "sentry-api-0-dsym-files",
            kwargs={
                "organization_slug": self.project.organization.slug,
                "project_slug": self.project.slug,
            },
        )

        self.login_as(user=self.user)

        out = BytesIO()
        f = zipfile.ZipFile(out, "w")
        f.writestr("proguard/%s.txt" % PROGUARD_UUID, PROGUARD_SOURCE)
        f.writestr("ignored-file.txt", b"This is just some stuff")
        f.close()

        response = self.client.post(
            url,
            {
                "file":
                SimpleUploadedFile("symbols.zip",
                                   out.getvalue(),
                                   content_type="application/zip")
            },
            format="multipart",
        )
        assert response.status_code == 201, response.content
        assert len(response.data) == 1

        event_data = {
            "user": {
                "ip_address": "31.172.207.97"
            },
            "extra": {},
            "project": self.project.id,
            "platform": "java",
            "debug_meta": {
                "images": [{
                    "type": "proguard",
                    "uuid": PROGUARD_UUID
                }]
            },
            "exception": {
                "values": [{
                    "stacktrace": {
                        "frames": [
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 67,
                            },
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 69,
                            },
                        ]
                    },
                    "type": "RuntimeException",
                    "value": "Shit broke yo",
                }]
            },
            "timestamp": iso_format(before_now(seconds=1)),
        }

        # We do a preflight post, because there are many queries polluting the array
        # before the actual "processing" happens (like, auth_user)
        resp = self._postWithHeader(event_data)
        with self.assertWriteQueries({
                "nodestore_node": 2,
                "sentry_eventuser": 1,
                "sentry_groupedmessage": 1,
                "sentry_userreport": 1,
        }):
            self._postWithHeader(event_data)
        assert resp.status_code == 200
        event_id = json.loads(resp.content)["id"]

        event = eventstore.get_event_by_id(self.project.id, event_id)
        bt = event.interfaces["exception"].values[0].stacktrace
        frames = bt.frames

        assert frames[0].function == "getClassContext"
        assert frames[0].module == "org.slf4j.helpers.Util$ClassContextSecurityManager"
        assert frames[1].function == "getExtraClassContext"
        assert frames[1].module == "org.slf4j.helpers.Util$ClassContextSecurityManager"

        assert event.culprit == (
            "org.slf4j.helpers.Util$ClassContextSecurityManager "
            "in getExtraClassContext")
Example #50
0
    def test_send_alert_event(self, safe_urlopen):
        event = self.store_event(data={}, project_id=self.project.id)
        group = event.group
        rule_future = RuleFuture(rule=self.rule,
                                 kwargs={"sentry_app": self.sentry_app})

        with self.tasks():
            notify_sentry_app(event, [rule_future])

        data = json.loads(faux(safe_urlopen).kwargs["data"])

        assert data == {
            "action": "triggered",
            "installation": {
                "uuid": self.install.uuid
            },
            "data": {
                "event":
                DictContaining(
                    event_id=event.event_id,
                    url=absolute_uri(
                        reverse(
                            "sentry-api-0-project-event-details",
                            args=[
                                self.organization.slug, self.project.slug,
                                event.event_id
                            ],
                        )),
                    web_url=absolute_uri(
                        reverse(
                            "sentry-organization-event-detail",
                            args=[
                                self.organization.slug, group.id,
                                event.event_id
                            ],
                        )),
                    issue_url=absolute_uri(f"/api/0/issues/{group.id}/"),
                    issue_id=str(group.id),
                ),
                "triggered_rule":
                self.rule.label,
            },
            "actor": {
                "type": "application",
                "id": "sentry",
                "name": "Sentry"
            },
        }

        assert faux(safe_urlopen).kwarg_equals(
            "headers",
            DictContaining(
                "Content-Type",
                "Request-ID",
                "Sentry-Hook-Resource",
                "Sentry-Hook-Timestamp",
                "Sentry-Hook-Signature",
            ),
        )

        buffer = SentryAppWebhookRequestsBuffer(self.sentry_app)
        requests = buffer.get_requests()

        assert len(requests) == 1
        assert requests[0]["response_code"] == 200
        assert requests[0]["event_type"] == "event_alert.triggered"
Example #51
0
    def test_issue_alert_team_fallback(self, mock_func):
        """Test that issue alerts are sent to each member of a team in Slack."""

        user2 = self.create_user(is_superuser=False)
        self.create_member(teams=[self.team],
                           user=user2,
                           organization=self.organization)
        self.identity = Identity.objects.create(
            external_id="UXXXXXXX2",
            idp=self.idp,
            user=user2,
            status=IdentityStatus.VALID,
            scopes=[],
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.SLACK,
            NotificationSettingTypes.ISSUE_ALERTS,
            NotificationSettingOptionValues.ALWAYS,
            user=user2,
        )

        event = self.store_event(data={
            "message": "Hello world",
            "level": "error"
        },
                                 project_id=self.project.id)
        action_data = {
            "id": "sentry.mail.actions.NotifyEmailAction",
            "targetType": "Team",
            "targetIdentifier": str(self.team.id),
        }
        rule = Rule.objects.create(
            project=self.project,
            label="ja rule",
            data={
                "match": "all",
                "actions": [action_data],
            },
        )
        notification = Notification(event=event, rule=rule)

        with self.options({"system.url-prefix":
                           "http://example.com"}), self.tasks():
            self.adapter.notify(notification, ActionTargetType.TEAM,
                                self.team.id)

        assert len(responses.calls) == 2

        # check that self.user got a notification
        data = parse_qs(responses.calls[0].request.body)
        assert data["channel"] == ["UXXXXXXX1"]
        assert "attachments" in data
        attachments = json.loads(data["attachments"][0])
        assert len(attachments) == 1
        assert attachments[0]["title"] == "Hello world"
        assert (
            attachments[0]["footer"] ==
            f"{self.project.slug} | <http://example.com/settings/account/notifications/alerts/?referrer=AlertRuleSlack|Notification Settings>"
        )

        # check that user2 got a notification as well
        data2 = parse_qs(responses.calls[1].request.body)
        assert data2["channel"] == ["UXXXXXXX2"]
        assert "attachments" in data2
        attachments = json.loads(data2["attachments"][0])
        assert len(attachments) == 1
        assert attachments[0]["title"] == "Hello world"
        assert (
            attachments[0]["footer"] ==
            f"{self.project.slug} | <http://example.com/settings/account/notifications/alerts/?referrer=AlertRuleSlack|Notification Settings>"
        )
Example #52
0
    def test_error_on_resolving(self):
        url = reverse(
            "sentry-api-0-dsym-files",
            kwargs={
                "organization_slug": self.project.organization.slug,
                "project_slug": self.project.slug,
            },
        )

        self.login_as(user=self.user)

        out = BytesIO()
        f = zipfile.ZipFile(out, "w")
        f.writestr("proguard/%s.txt" % PROGUARD_BUG_UUID, PROGUARD_BUG_SOURCE)
        f.close()

        response = self.client.post(
            url,
            {
                "file":
                SimpleUploadedFile("symbols.zip",
                                   out.getvalue(),
                                   content_type="application/zip")
            },
            format="multipart",
        )
        assert response.status_code == 201, response.content
        assert len(response.data) == 1

        event_data = {
            "user": {
                "ip_address": "31.172.207.97"
            },
            "extra": {},
            "project": self.project.id,
            "platform": "java",
            "debug_meta": {
                "images": [{
                    "type": "proguard",
                    "uuid": PROGUARD_BUG_UUID
                }]
            },
            "exception": {
                "values": [{
                    "stacktrace": {
                        "frames": [
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 67,
                            },
                            {
                                "function": "a",
                                "abs_path": None,
                                "module": "org.a.b.g$a",
                                "filename": None,
                                "lineno": 69,
                            },
                        ]
                    },
                    "type": "RuntimeException",
                    "value": "Shit broke yo",
                }]
            },
            "timestamp": iso_format(before_now(seconds=1)),
        }

        resp = self._postWithHeader(event_data)
        assert resp.status_code == 200
        event_id = json.loads(resp.content)["id"]

        event = eventstore.get_event_by_id(self.project.id, event_id)

        assert len(event.data["errors"]) == 1
        assert event.data["errors"][0] == {
            "mapping_uuid": u"071207ac-b491-4a74-957c-2c94fd9594f2",
            "type": "proguard_missing_lineno",
        }
Example #53
0
    def test_sync_assignee_outbound_with_paging(self):
        vsts_work_item_id = 5
        responses.add(
            responses.PATCH,
            "https://fabrikam-fiber-inc.visualstudio.com/_apis/wit/workitems/%d"
            % vsts_work_item_id,
            body=WORK_ITEM_RESPONSE,
            content_type="application/json",
        )
        responses.add(
            responses.GET,
            "https://fabrikam-fiber-inc.vssps.visualstudio.com/_apis/graph/users",
            json={
                "value": [
                    {
                        "mailAddress": "*****@*****.**"
                    },
                    {
                        "mailAddress": "*****@*****.**"
                    },
                    {
                        "mailAddress": "*****@*****.**"
                    },
                ]
            },
            headers={"X-MS-ContinuationToken": "continuation-token"},
            match_querystring=True,
        )
        responses.add(
            responses.GET,
            "https://fabrikam-fiber-inc.vssps.visualstudio.com/_apis/graph/users?continuationToken=continuation-token",
            body=GET_USERS_RESPONSE,
            content_type="application/json",
            match_querystring=True,
        )

        user = self.create_user("*****@*****.**")
        external_issue = ExternalIssue.objects.create(
            organization_id=self.organization.id,
            integration_id=self.integration.model.id,
            key=vsts_work_item_id,
            title="I'm a title!",
            description="I'm a description.",
        )
        self.integration.sync_assignee_outbound(external_issue,
                                                user,
                                                assign=True)
        assert len(responses.calls) == 3
        assert (
            responses.calls[0].request.url ==
            "https://fabrikam-fiber-inc.vssps.visualstudio.com/_apis/graph/users"
        )
        assert responses.calls[0].response.status_code == 200

        assert (
            responses.calls[1].request.url ==
            "https://fabrikam-fiber-inc.vssps.visualstudio.com/_apis/graph/users?continuationToken=continuation-token"
        )
        assert responses.calls[1].response.status_code == 200

        assert (
            responses.calls[2].request.url ==
            "https://fabrikam-fiber-inc.visualstudio.com/_apis/wit/workitems/%d"
            % vsts_work_item_id)
        request_body = json.loads(responses.calls[2].request.body)
        assert len(request_body) == 1
        assert request_body[0]["path"] == "/fields/System.AssignedTo"
        assert request_body[0]["value"] == "*****@*****.**"
        assert request_body[0]["op"] == "replace"
        assert responses.calls[2].response.status_code == 200
Example #54
0
def raw_query(start,
              end,
              groupby=None,
              conditions=None,
              filter_keys=None,
              aggregations=None,
              rollup=None,
              referrer=None,
              is_grouprelease=False,
              **kwargs):
    """
    Sends a query to snuba.

    `start` and `end`: The beginning and end of the query time window (required)

    `groupby`: A list of column names to group by.

    `conditions`: A list of (column, operator, literal) conditions to be passed
    to the query. Conditions that we know will not have to be translated should
    be passed this way (eg tag[foo] = bar).

    `filter_keys`: A dictionary of {col: [key, ...]} that will be converted
    into "col IN (key, ...)" conditions. These are used to restrict the query to
    known sets of project/issue/environment/release etc. Appropriate
    translations (eg. from environment model ID to environment name) are
    performed on the query, and the inverse translation performed on the
    result. The project_id(s) to restrict the query to will also be
    automatically inferred from these keys.

    `aggregations` a list of (aggregation_function, column, alias) tuples to be
    passed to the query.

    The rest of the args are passed directly into the query JSON unmodified.
    See the snuba schema for details.
    """

    # convert to naive UTC datetimes, as Snuba only deals in UTC
    # and this avoids offset-naive and offset-aware issues
    start = naiveify_datetime(start)
    end = naiveify_datetime(end)

    groupby = groupby or []
    conditions = conditions or []
    aggregations = aggregations or []
    filter_keys = filter_keys or {}

    with timer('get_snuba_map'):
        forward, reverse = get_snuba_translators(
            filter_keys, is_grouprelease=is_grouprelease)

    if 'project_id' in filter_keys:
        # If we are given a set of project ids, use those directly.
        project_ids = list(set(filter_keys['project_id']))
    elif filter_keys:
        # Otherwise infer the project_ids from any related models
        with timer('get_related_project_ids'):
            ids = [
                get_related_project_ids(k, filter_keys[k]) for k in filter_keys
            ]
            project_ids = list(set.union(*map(set, ids)))
    else:
        project_ids = []

    for col, keys in six.iteritems(forward(filter_keys.copy())):
        if keys:
            if len(keys) == 1 and None in keys:
                conditions.append((col, 'IS NULL', None))
            else:
                conditions.append((col, 'IN', keys))

    if not project_ids:
        raise UnqualifiedQueryError(
            "No project_id filter, or none could be inferred from other filters."
        )

    # any project will do, as they should all be from the same organization
    project = Project.objects.get(pk=project_ids[0])
    retention = quotas.get_event_retention(
        organization=Organization(project.organization_id))
    if retention:
        start = max(start, datetime.utcnow() - timedelta(days=retention))
        if start > end:
            raise QueryOutsideRetentionError

    # if `shrink_time_window` pushed `start` after `end` it means the user queried
    # a Group for T1 to T2 when the group was only active for T3 to T4, so the query
    # wouldn't return any results anyway
    new_start = shrink_time_window(filter_keys.get('issue'), start)

    # TODO (alexh) this is a quick emergency fix for an occasion where a search
    # results in only 1 django candidate, which is then passed to snuba to
    # check and we raised because of it. Remove this once we figure out why the
    # candidate was returned from django at all if it existed only outside the
    # time range of the query
    if new_start <= end:
        start = new_start

    if start > end:
        raise QueryOutsideGroupActivityError

    kwargs.update({
        'from_date': start.isoformat(),
        'to_date': end.isoformat(),
        'groupby': groupby,
        'conditions': conditions,
        'aggregations': aggregations,
        'project': project_ids,
        'granularity': rollup,  # TODO name these things the same
    })
    kwargs = {k: v for k, v in six.iteritems(kwargs) if v is not None}

    kwargs.update(OVERRIDE_OPTIONS)

    headers = {}
    if referrer:
        headers['referer'] = referrer

    try:
        with timer('snuba_query'):
            response = _snuba_pool.urlopen('POST',
                                           '/query',
                                           body=json.dumps(kwargs),
                                           headers=headers)
    except urllib3.exceptions.HTTPError as err:
        raise SnubaError(err)

    try:
        body = json.loads(response.data)
    except ValueError:
        raise UnexpectedResponseError(
            u"Could not decode JSON response: {}".format(response.data))

    if response.status != 200:
        if body.get('error'):
            error = body['error']
            if response.status == 429:
                raise RateLimitExceeded(error['message'])
            elif error['type'] == 'schema':
                raise SchemaValidationError(error['message'])
            elif error['type'] == 'clickhouse':
                raise clickhouse_error_codes_map.get(
                    error['code'],
                    QueryExecutionError,
                )(error['message'])
            else:
                raise SnubaError(error['message'])
        else:
            raise SnubaError(u'HTTP {}'.format(response.status))

    # Translate the result rows back from snuba keys to model ids, per column
    body['data'] = [reverse(d) for d in body['data']]
    return body
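Compared with the version in Example #41, this signature forwards any extra keyword arguments straight into the query JSON, as the docstring notes. A hedged sketch of that pass-through (column names are hypothetical):

# limit/offset/orderby are not named parameters here; they ride along in
# **kwargs and land in the request body unmodified.
from datetime import datetime, timedelta

end = datetime.utcnow()
start = end - timedelta(hours=1)
result = raw_query(
    start,
    end,
    filter_keys={'project_id': [1]},
    aggregations=[('count()', '', 'count')],
    groupby=['issue'],
    orderby='-count',
    limit=10,
)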
Example #55
0
    def post(self, request, organization, version):
        """
        Handle an artifact bundle and merge it into the release
        ```````````````````````````````````````````````````````

        :auth: required
        """

        try:
            release = Release.objects.get(organization_id=organization.id, version=version)
        except Release.DoesNotExist:
            raise ResourceDoesNotExist

        if not self.has_release_permission(request, organization, release):
            raise ResourceDoesNotExist

        schema = {
            "type": "object",
            "properties": {
                "checksum": {"type": "string", "pattern": "^[0-9a-f]{40}$"},
                "chunks": {
                    "type": "array",
                    "items": {"type": "string", "pattern": "^[0-9a-f]{40}$"},
                },
            },
            "required": ["checksum", "chunks"],
            "additionalProperties": False,
        }

        try:
            data = json.loads(request.body)
            jsonschema.validate(data, schema)
        except jsonschema.ValidationError as e:
            return Response({"error": str(e).splitlines()[0]}, status=400)
        except BaseException:
            return Response({"error": "Invalid json body"}, status=400)

        checksum = data.get("checksum", None)
        chunks = data.get("chunks", [])

        state, detail = get_assemble_status(AssembleTask.ARTIFACTS, organization.id, checksum)
        if state == ChunkFileState.OK:
            return Response({"state": state, "detail": None, "missingChunks": []}, status=200)
        elif state is not None:
            return Response({"state": state, "detail": detail, "missingChunks": []})

        # There is neither a known file nor a cached state, so we will
        # have to create a new file. Ensure that there are checksums.
        # If not, we assume this is a poll and report NOT_FOUND
        if not chunks:
            return Response({"state": ChunkFileState.NOT_FOUND, "missingChunks": []}, status=200)

        set_assemble_status(
            AssembleTask.ARTIFACTS, organization.id, checksum, ChunkFileState.CREATED
        )

        from sentry.tasks.assemble import assemble_artifacts

        assemble_artifacts.apply_async(
            kwargs={
                "org_id": organization.id,
                "version": version,
                "checksum": checksum,
                "chunks": chunks,
            }
        )

        return Response({"state": ChunkFileState.CREATED, "missingChunks": []}, status=200)
Example #56
0
 def _read_manifest(self) -> dict:
     manifest_bytes = self.read("manifest.json")
     return json.loads(manifest_bytes.decode("utf-8"))
Example #57
0
    def test_issue_alert_team_new_project(self, mock_func):
        """Test that issue alerts are sent to a team in Slack when the team has added a new project"""

        # add a second user to the team so we can be sure it's only
        # sent once (to the team, and not to each individual user)
        user2 = self.create_user(is_superuser=False)
        self.create_member(teams=[self.team],
                           user=user2,
                           organization=self.organization)
        self.idp = IdentityProvider.objects.create(type="slack",
                                                   external_id="TXXXXXXX2",
                                                   config={})
        self.identity = Identity.objects.create(
            external_id="UXXXXXXX2",
            idp=self.idp,
            user=user2,
            status=IdentityStatus.VALID,
            scopes=[],
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.SLACK,
            NotificationSettingTypes.ISSUE_ALERTS,
            NotificationSettingOptionValues.ALWAYS,
            user=user2,
        )
        # update the team's notification settings
        ExternalActor.objects.create(
            actor=self.team.actor,
            organization=self.organization,
            integration=self.integration,
            provider=ExternalProviders.SLACK.value,
            external_name="goma",
            external_id="CXXXXXXX2",
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.SLACK,
            NotificationSettingTypes.ISSUE_ALERTS,
            NotificationSettingOptionValues.ALWAYS,
            team=self.team,
        )
        # add a new project
        project2 = self.create_project(name="hellboy",
                                       organization=self.organization,
                                       teams=[self.team])

        event = self.store_event(data={
            "message": "Hello world",
            "level": "error"
        },
                                 project_id=project2.id)
        action_data = {
            "id": "sentry.mail.actions.NotifyEmailAction",
            "targetType": "Team",
            "targetIdentifier": str(self.team.id),
        }
        rule = Rule.objects.create(
            project=project2,
            label="ja rule",
            data={
                "match": "all",
                "actions": [action_data],
            },
        )
        notification = Notification(event=event, rule=rule)

        with self.options({"system.url-prefix":
                           "http://example.com"}), self.tasks():
            self.adapter.notify(notification, ActionTargetType.TEAM,
                                self.team.id)

        # check that only one was sent out - more would mean each user is being notified
        # rather than the team
        assert len(responses.calls) == 1

        # check that the team got a notification
        data = parse_qs(responses.calls[0].request.body)
        assert data["channel"] == ["CXXXXXXX2"]
        assert "attachments" in data
        attachments = json.loads(data["attachments"][0])
        assert len(attachments) == 1
        assert attachments[0]["title"] == "Hello world"
        assert (
            attachments[0]["footer"] ==
            f"{project2.slug} | <http://example.com/settings/{self.organization.slug}/teams/{self.team.slug}/notifications/?referrer=AlertRuleSlack|Notification Settings>"
        )
Example #58
0
 def get_value(self):
     key = self._get_redis_key()
     value = self.client.get(key)
     return json.loads(value)
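A complementary setter is not shown; assuming the same key scheme, it would JSON-encode before writing:

 def set_value(self, value):
     # Hypothetical counterpart to get_value: store the JSON-encoded
     # value under the same redis key.
     key = self._get_redis_key()
     self.client.set(key, json.dumps(value))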
Example #59
0
def load_data(platform, default=None, timestamp=None):
    # NOTE: Before editing this data, make sure you understand the context
    # in which it's being used. It is NOT only used for local development and
    # has production consequences.
    #   * bin/load-mocks to generate fake data for local testing
    #   * When a new project is created, a fake event is generated as a "starter"
    #     event so it's not an empty project.
    #   * When a user clicks Test Configuration from notification plugin settings page,
    #     a fake event is generated to go through the pipeline.

    data = None
    for platform in (platform, default):
        if platform is None:
            continue

        json_path = os.path.join(DATA_ROOT, 'samples',
                                 '%s.json' % (platform.encode('utf-8'), ))

        if not os.path.exists(json_path):
            continue

        with open(json_path) as fp:
            data = json.loads(fp.read())
            break

    if data is None:
        return

    if platform == 'csp':
        return data

    data['platform'] = platform
    data['message'] = 'This is an example %s exception' % (platform, )
    data['sentry.interfaces.User'] = {
        "username": "******",
        "id": "1671",
        "email": "*****@*****.**"
    }
    data['extra'] = {
        'session': {
            'foo': 'bar',
        },
        'results': [1, 2, 3, 4, 5],
        'emptyList': [],
        'emptyMap': {},
        'length': 10837790,
        'unauthorized': False,
        'url': 'http://example.org/foo/bar/',
    }
    data['modules'] = {
        'my.package': '1.0.0',
    }
    data['sentry.interfaces.Http'] = {
        "cookies": 'foo=bar;biz=baz',
        "url": "http://example.com/foo",
        "headers": {
            "Referer":
            "http://example.com",
            "Content-Type":
            "application/json",
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36"
        },
        "env": {
            'ENV': 'prod',
        },
        "query_string": "foo=bar",
        "data": '{"hello": "world"}',
        "method": "GET"
    }

    start = datetime.utcnow()
    if timestamp:
        try:
            start = datetime.utcfromtimestamp(timestamp)
        except TypeError:
            pass

    # Make breadcrumb timestamps relative to right now so they make sense
    breadcrumbs = data.get('sentry.interfaces.Breadcrumbs')
    if breadcrumbs is not None:
        duration = 1000
        values = breadcrumbs['values']
        for value in reversed(values):
            value['timestamp'] = milliseconds_ago(start, duration)

            # Every breadcrumb is 1s apart
            duration += 1000

    return data
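milliseconds_ago is not included in this listing; a minimal implementation consistent with its use in the breadcrumb loop above (an epoch timestamp the given number of milliseconds before `now`) could be:

# Sketch only; the helper in the original source may differ.
import time
from datetime import timedelta

def milliseconds_ago(now, milliseconds):
    ago = now - timedelta(milliseconds=milliseconds)
    return time.mktime(ago.timetuple()) + ago.microsecond / 1e6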
Example #60
0
    def post(self, request, project):
        """
        Assemble one or multiple chunks (FileBlob) into debug files
        ``````````````````````````````````````````````````````````

        :auth: required
        """
        schema = {
            "type": "object",
            "patternProperties": {
                "^[0-9a-f]{40}$": {
                    "type": "object",
                    "required": ["name", "chunks"],
                    "properties": {
                        "name": {
                            "type": "string"
                        },
                        "chunks": {
                            "type": "array",
                            "items": {
                                "type": "string"
                            }
                        }
                    },
                    "additionalProperties": False
                }
            },
            "additionalProperties": False
        }

        try:
            files = json.loads(request.body)
            jsonschema.validate(files, schema)
        except jsonschema.ValidationError as e:
            return Response({'error': str(e).splitlines()[0]}, status=400)
        except BaseException:
            return Response({'error': 'Invalid json body'}, status=400)

        file_response = {}

        from sentry.tasks.assemble import assemble_dif
        for checksum, file_to_assemble in six.iteritems(files):
            name = file_to_assemble.get('name', None)
            chunks = file_to_assemble.get('chunks', [])

            # First, check the cached assemble status. During assembling, a
            # ProjectDebugFile will be created and we need to prevent a race
            # condition.
            state, detail = get_assemble_status(project, checksum)
            if state is not None:
                file_response[checksum] = {
                    'state': state,
                    'detail': detail,
                    'missingChunks': [],
                }
                continue

            # Next, check if this project already owns the ProjectDebugFile.
            # This can under rare circumstances yield more than one file
            # which is why we use first() here instead of get().
            dif = ProjectDebugFile.objects \
                .filter(project=project, file__checksum=checksum) \
                .select_related('file') \
                .order_by('-id') \
                .first()

            if dif is not None:
                file_response[checksum] = {
                    'state': ChunkFileState.OK,
                    'detail': None,
                    'missingChunks': [],
                    'dif': serialize(dif),
                }
                continue

            # There is neither a known file nor a cached state, so we will
            # have to create a new file. Ensure that there are checksums.
            # If not, we assume this is a poll and report NOT_FOUND
            if not chunks:
                file_response[checksum] = {
                    'state': ChunkFileState.NOT_FOUND,
                    'missingChunks': [],
                }
                continue

            # Check if all requested chunks have been uploaded.
            missing_chunks = find_missing_chunks(project.organization, chunks)
            if missing_chunks:
                file_response[checksum] = {
                    'state': ChunkFileState.NOT_FOUND,
                    'missingChunks': missing_chunks,
                }
                continue

            # We don't have a state yet, this means we can now start
            # an assemble job in the background.
            set_assemble_status(project, checksum, ChunkFileState.CREATED)
            assemble_dif.apply_async(
                kwargs={
                    'project_id': project.id,
                    'name': name,
                    'checksum': checksum,
                    'chunks': chunks,
                })

            file_response[checksum] = {
                'state': ChunkFileState.CREATED,
                'missingChunks': [],
            }

        return Response(file_response, status=200)
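For reference, a request body that the schema above accepts looks like this; the checksums are made-up 40-character hex strings:

example_body = {
    "0123456789abcdef0123456789abcdef01234567": {
        "name": "app.dSYM",
        "chunks": [
            "89abcdef0123456789abcdef0123456789abcdef",
        ],
    },
}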