Example #1
    def has_apps_configured(self):
        return bool(
            options.get('github.apps-install-url') and
            options.get('github.integration-app-id') and
            options.get('github.integration-hook-secret') and
            options.get('github.integration-private-key')
        )
Example #2
def get_internal_source(project):
    """
    Returns the source configuration for a Sentry project.
    """
    internal_url_prefix = options.get('system.internal-url-prefix')
    if not internal_url_prefix:
        internal_url_prefix = options.get('system.url-prefix')
        if sys.platform == 'darwin':
            internal_url_prefix = internal_url_prefix \
                .replace("localhost", "host.docker.internal") \
                .replace("127.0.0.1", "host.docker.internal")

    assert internal_url_prefix
    sentry_source_url = '%s%s' % (
        internal_url_prefix.rstrip('/'),
        reverse('sentry-api-0-dsym-files', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug
        })
    )

    return {
        'type': 'sentry',
        'id': 'sentry:project',
        'url': sentry_source_url,
        'token': get_system_token(),
    }
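A side note on the darwin branch above: inside Docker on macOS, containers reach the host via host.docker.internal rather than localhost. A minimal standalone sketch of the rewrite (the prefix value is hypothetical):

prefix = "http://127.0.0.1:8000"  # hypothetical 'system.url-prefix' value
rewritten = prefix.replace("localhost", "host.docker.internal") \
                  .replace("127.0.0.1", "host.docker.internal")
assert rewritten == "http://host.docker.internal:8000"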
Example #3
    def test_simple(self, safe_urlread, safe_urlopen,
                    mock_get_all_package_versions):
        mock_get_all_package_versions.return_value = {'foo': '1.0'}
        safe_urlread.return_value = json.dumps({
            'notices': [],
            'version': {'stable': '1.0.0'},
        })

        assert options.set('system.admin-email', '*****@*****.**')
        send_beacon()

        install_id = options.get('sentry:install-id')
        assert install_id and len(install_id) == 40

        safe_urlopen.assert_called_once_with(BEACON_URL, json={
            'install_id': install_id,
            'version': sentry.get_version(),
            'data': {
                'organizations': 1,
                'users': 0,
                'projects': 1,
                'teams': 1,
                'events.24h': 0,
            },
            'admin_email': '*****@*****.**',
            'packages': mock_get_all_package_versions.return_value,
        }, timeout=5)
        safe_urlread.assert_called_once_with(safe_urlopen.return_value)

        assert options.get('sentry:latest_version') == '1.0.0'
Example #4
def lookup_system_symbols(symbols, sdk_info=None, cpu_name=None):
    """Looks for system symbols in the configured system server if
    enabled.  If this failes or the server is disabled, `None` is
    returned.
    """
    if not options.get('symbolserver.enabled'):
        return

    url = '%s/lookup' % options.get('symbolserver.options')['url'].rstrip('/')
    sess = Session()
    symbol_query = {
        'sdk_id': sdk_info_to_sdk_id(sdk_info),
        'cpu_name': cpu_name,
        'symbols': symbols,
    }

    attempts = 0

    with sess:
        while 1:
            try:
                rv = sess.post(url, json=symbol_query)
                # If the symbols server does not know about the SDK at all
                # it will report a 404 here.  In that case just assume
                # that we did not find a match and do not retry.
                if rv.status_code == 404:
                    return None
                rv.raise_for_status()
                return rv.json()['symbols']
            except (IOError, RequestException):
                attempts += 1
                if attempts > MAX_ATTEMPTS:
                    logger.error('Failed to contact system symbol server',
                                 exc_info=True)
                    return
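                # otherwise fall through and retry the request immediately
                # (note: there is no backoff between attempts)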
Example #5
    def get(self, request, organization):
        """
        Return chunk upload parameters
        ``````````````````````````````
        :auth: required
        """
        endpoint = options.get('system.upload-url-prefix')
        # We fall back to the default system URL if the config is not set
        if len(endpoint) == 0:
            endpoint = options.get('system.url-prefix')

        url = reverse('sentry-api-0-chunk-upload', args=[organization.slug])
        endpoint = urljoin(endpoint.rstrip('/') + '/', url.lstrip('/'))

        return Response(
            {
                'url': endpoint,
                'chunkSize': DEFAULT_BLOB_SIZE,
                'chunksPerRequest': MAX_CHUNKS_PER_REQUEST,
                'maxRequestSize': MAX_REQUEST_SIZE,
                'concurrency': MAX_CONCURRENCY,
                'hashAlgorithm': HASH_ALGORITHM,
                'compression': ['gzip'],
            }
        )
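The rstrip/lstrip dance above guarantees exactly one slash at the join point. A small self-contained illustration (values hypothetical):

from urllib.parse import urljoin

endpoint = "https://upload.example.com"  # hypothetical 'system.upload-url-prefix'
url = "/api/0/organizations/acme/chunk-upload/"
joined = urljoin(endpoint.rstrip('/') + '/', url.lstrip('/'))
assert joined == "https://upload.example.com/api/0/organizations/acme/chunk-upload/"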
Example #6
    def test_simple(self, safe_urlread, safe_urlopen):
        self.create_project(platform='java')

        safe_urlread.return_value = json.dumps({
            'notices': [],
            'version': {'stable': '1.0.0'},
        })

        with self.settings(SENTRY_ADMIN_EMAIL='*****@*****.**'):
            send_beacon()

        install_id = options.get('sentry:install-id')
        assert install_id and len(install_id) == 40

        safe_urlopen.assert_called_once_with(BEACON_URL, json={
            'install_id': install_id,
            'version': sentry.get_version(),
            'data': {
                'platforms': ['java'],
                'organizations': 2,
                'users': 2,
                'projects': 2,
                'teams': 2,
                'events.24h': 0,
            },
            'admin_email': '*****@*****.**',
        }, timeout=5)
        safe_urlread.assert_called_once_with(safe_urlopen.return_value)

        assert options.get('sentry:latest_version') == '1.0.0'
Example #7
    def test_chunk_parameters(self):
        response = self.client.get(
            self.url,
            HTTP_AUTHORIZATION=u'Bearer {}'.format(self.token.token),
            format='json'
        )

        endpoint = options.get('system.upload-url-prefix')
        # We fall back to the default system URL if the config is not set
        if len(endpoint) == 0:
            endpoint = options.get('system.url-prefix')

        assert response.status_code == 200, response.content
        assert response.data['chunkSize'] == CHUNK_UPLOAD_BLOB_SIZE
        assert response.data['chunksPerRequest'] == MAX_CHUNKS_PER_REQUEST
        assert response.data['maxRequestSize'] == MAX_REQUEST_SIZE
        assert response.data['maxFileSize'] == options.get('system.maximum-file-size')
        assert response.data['concurrency'] == MAX_CONCURRENCY
        assert response.data['hashAlgorithm'] == HASH_ALGORITHM
        assert response.data['url'] == options.get('system.url-prefix') + self.url

        options.set('system.upload-url-prefix', 'test')
        response = self.client.get(
            self.url,
            HTTP_AUTHORIZATION=u'Bearer {}'.format(self.token.token),
            format='json'
        )

        assert response.data['url'] == options.get('system.upload-url-prefix') + self.url
Example #8
    def _publish_to_kafka(self, request):
        """
        Sends raw event data to Kafka for later offline processing.
        """
        try:
            # This may fail when we e.g. send a multipart form. We ignore those errors for now.
            data = request.body

            if not data or len(data) > options.get('kafka-publisher.max-event-size'):
                return

            # Sampling
            if random.random() >= options.get('kafka-publisher.raw-event-sample-rate'):
                return

            # We want to send only serializable items from request.META
            meta = {}
            for key, value in request.META.items():
                try:
                    json.dumps([key, value])
                    meta[key] = value
                except (TypeError, ValueError):
                    pass

            meta['SENTRY_API_VIEW_NAME'] = self.__class__.__name__

            kafka_publisher.publish(
                channel=getattr(settings, 'KAFKA_RAW_EVENTS_PUBLISHER_TOPIC', 'raw-store-events'),
                value=json.dumps([meta, base64.b64encode(data)])
            )
        except Exception as e:
            logger.debug("Cannot publish event to Kafka: {}".format(e.message))
Example #9
    def test_put_simple(self):
        self.login_as(user=self.user)
        assert options.get('mail.host') != 'lolcalhost'
        response = self.client.put(self.url, {
            'mail.host': 'lolcalhost',
        })
        assert response.status_code == 200
        assert options.get('mail.host') == 'lolcalhost'
Example #10
def u2f_appid(request):
    facets = options.get("u2f.facets")
    if not facets:
        facets = [options.get("system.url-prefix")]
    return HttpResponse(
        json.dumps({"trustedFacets": [{"version": {"major": 1, "minor": 0}, "ids": [x.rstrip("/") for x in facets]}]}),
        content_type="application/fido.trusted-apps+json",
    )
Example #11
def status_mail(request):
    form = TestEmailForm(request.POST or None)

    if form.is_valid():
        body = """This email was sent as a request to test the Sentry outbound email configuration."""
        try:
            send_mail(
                "%s Test Email" % (options.get("mail.subject-prefix"),),
                body,
                options.get("mail.from"),
                [request.user.email],
                fail_silently=False,
            )
        except Exception as e:
            form.errors["__all__"] = [six.text_type(e)]

    return render_to_response(
        "sentry/admin/status/mail.html",
        {
            "form": form,
            "mail_host": options.get("mail.host"),
            "mail_password": bool(options.get("mail.password")),
            "mail_username": options.get("mail.username"),
            "mail_port": options.get("mail.port"),
            "mail_use_tls": options.get("mail.use-tls"),
            "mail_from": options.get("mail.from"),
            "mail_list_namespace": options.get("mail.list-namespace"),
        },
        request,
    )
Example #12
def status_mail(request):
    form = TestEmailForm(request.POST or None)

    if form.is_valid():
        body = """This email was sent as a request to test the Sentry outbound email configuration."""
        try:
            send_mail(
                '%s Test Email' % (options.get('mail.subject-prefix'), ),
                body,
                options.get('mail.from'), [request.user.email],
                fail_silently=False
            )
        except Exception as e:
            form.errors['__all__'] = [six.text_type(e)]

    return render_to_response(
        'sentry/admin/status/mail.html', {
            'form': form,
            'mail_host': options.get('mail.host'),
            'mail_password': bool(options.get('mail.password')),
            'mail_username': options.get('mail.username'),
            'mail_port': options.get('mail.port'),
            'mail_use_tls': options.get('mail.use-tls'),
            'mail_from': options.get('mail.from'),
            'mail_list_namespace': options.get('mail.list-namespace'),
        }, request
    )
Example #13
def get_storage():
    from sentry import options
    backend = options.get('filestore.backend')
    options = options.get('filestore.options')

    try:
        backend = settings.SENTRY_FILESTORE_ALIASES[backend]
    except KeyError:
        pass

    storage = get_storage_class(backend)
    return storage(**options)
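The return value is an instantiated Django storage backend, so the standard storage API applies. A hedged usage sketch (the file name and payload are hypothetical):

from django.core.files.base import ContentFile

storage = get_storage()
name = storage.save("debug-files/abc123", ContentFile(b"payload"))
with storage.open(name) as fh:
    data = fh.read()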
Example #14
def get_connection(fail_silently=False):
    """
    Gets an SMTP connection using our OptionsStore
    """
    return _get_connection(
        backend=get_mail_backend(),
        host=options.get('mail.host'),
        port=options.get('mail.port'),
        username=options.get('mail.username'),
        password=options.get('mail.password'),
        use_tls=options.get('mail.use-tls'),
        fail_silently=fail_silently,
    )
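The returned connection plugs into Django's normal mail API. A minimal sketch (the recipient is hypothetical):

from django.core.mail import EmailMessage

EmailMessage(
    subject="Test",
    body="Hello from Sentry",
    to=["ops@example.com"],
    connection=get_connection(fail_silently=True),
).send()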
Example #15
def u2f_appid(request):
    facets = options.get('u2f.facets')
    if not facets:
        facets = [options.get('system.url-prefix')]
    return HttpResponse(json.dumps({
        'trustedFacets': [{
            'version': {
                'major': 1,
                'minor': 0
            },
            'ids': [x.rstrip('/') for x in facets]
        }]
    }), content_type='application/fido.trusted-apps+json')
Example #16
def _is_symbolicator_enabled(project, data):
    if not options.get('symbolicator.enabled'):
        return False

    if project.get_option('sentry:symbolicator-enabled'):
        return True

    percentage = options.get('sentry:symbolicator-percent-opt-in') or 0
    if percentage > 0:
        id_bit = int(data['event_id'][4:6], 16)
        return id_bit < percentage * 256

    return False
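Worked numbers for the percentage gate above: two hex digits of the event id give a uniform value in 0..255, which is compared against percentage * 256.

# Hypothetical event id; with a 25% opt-in the threshold is 0.25 * 256 = 64.
event_id = "4f8a9c3e5d2b4f6a8e1c7b9d0a2f4e6c"
id_bit = int(event_id[4:6], 16)  # "9c" -> 156
assert not (id_bit < 0.25 * 256)  # 156 >= 64: this event is not opted in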
Example #17
    def get(self, request):
        query = request.GET.get('query')
        if query == 'is:required':
            option_list = options.filter(flag=options.FLAG_REQUIRED)
        elif query:
            raise ValueError('{} is not a supported search query'.format(query))
        else:
            option_list = options.all()

        results = {}
        for k in option_list:
            # TODO(mattrobenolt): Expose this as a property on Key.
            diskPriority = bool(k.flags & options.FLAG_PRIORITIZE_DISK and settings.SENTRY_OPTIONS.get(k.name))

            # TODO(mattrobenolt): help, placeholder, title, type
            results[k.name] = {
                'value': options.get(k.name),
                'field': {
                    'default': k.default,
                    'required': bool(k.flags & options.FLAG_REQUIRED),
                    # We're disabled if the disk has taken priority
                    'disabled': diskPriority,
                    'disabledReason': 'diskPriority' if diskPriority else None,
                }
            }

        return Response(results)
Example #18
    def get(self, request):
        query = request.GET.get('query')
        if query == 'is:required':
            option_list = options.filter(flag=options.FLAG_REQUIRED)
        elif query:
            return Response('{} is not a supported search query'.format(query), status=400)
        else:
            option_list = options.all()

        smtp_disabled = not is_smtp_enabled()

        results = {}
        for k in option_list:
            disabled, disabled_reason = False, None

            if smtp_disabled and k.name[:5] == 'mail.':
                disabled_reason, disabled = 'smtpDisabled', True
            elif bool(k.flags & options.FLAG_PRIORITIZE_DISK and settings.SENTRY_OPTIONS.get(k.name)):
                # TODO(mattrobenolt): Expose this as a property on Key.
                disabled_reason, disabled = 'diskPriority', True

            # TODO(mattrobenolt): help, placeholder, title, type
            results[k.name] = {
                'value': options.get(k.name),
                'field': {
                    'default': k.default(),
                    'required': bool(k.flags & options.FLAG_REQUIRED),
                    'disabled': disabled,
                    'disabledReason': disabled_reason,
                    'isSet': options.isset(k.name),
                    'allowEmpty': bool(k.flags & options.FLAG_ALLOW_EMPTY),
                }
            }

        return Response(results)
Example #19
    def send_async(self, to=None, cc=None, bcc=None):
        from sentry.tasks.email import send_email
        fmt = options.get('system.logging-format')
        messages = self.get_built_messages(to, cc=cc, bcc=bcc)
        extra = {'message_type': self.type}
        loggable = [v for k, v in six.iteritems(self.context) if hasattr(v, 'id')]
        for context in loggable:
            extra['%s_id' % type(context).__name__.lower()] = context.id

        log_mail_queued = partial(logger.info, 'mail.queued', extra=extra)
        for message in messages:
            safe_execute(
                send_email.delay,
                message=message,
                _with_transaction=False,
            )
            extra['message_id'] = message.extra_headers['Message-Id']
            metrics.incr('email.queued', instance=self.type)
            if fmt == LoggingFormat.HUMAN:
                extra['message_to'] = self.format_to(message.to)
                log_mail_queued()
            elif fmt == LoggingFormat.MACHINE:
                for recipient in message.to:
                    extra['message_to'] = recipient
                    log_mail_queued()
Example #20
    def __init__(self, subject, context=None, template=None, html_template=None,
                 body=None, html_body=None, headers=None, reference=None,
                 reply_reference=None, from_email=None, type=None):
        assert not (body and template)
        assert not (html_body and html_template)
        assert context or not (template or html_template)

        if headers is None:
            headers = {}

        self.subject = subject
        self.context = context or {}
        self.template = template
        self.html_template = html_template
        self._txt_body = body
        self._html_body = html_body
        self.headers = headers
        self.reference = reference  # The object that generated this message
        self.reply_reference = reply_reference  # The object this message is replying about
        self.from_email = from_email or options.get('mail.from')
        self._send_to = set()
        self.type = type if type else 'generic'

        if reference is not None and 'List-Id' not in headers:
            try:
                headers['List-Id'] = make_listid_from_instance(reference)
            except ListResolver.UnregisteredTypeError as error:
                logger.debug(str(error))
            except AssertionError as error:
                logger.warning(str(error))
Example #21
    def check(self):
        last_ping = options.get('sentry:last_worker_ping') or 0
        if last_ping >= time() - 300:
            return []
        return [
            Problem("Background workers haven't checked in recently. This can mean an issue with your configuration or a serious backlog in tasks."),
        ]
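For context, a sketch of the counterpart that keeps this check green; the task name and scheduling are assumptions, not shown in the source:

from time import time
from sentry import options

def send_worker_ping():
    # Presumably run periodically by a worker; a fresh timestamp keeps
    # the 300-second staleness check above from firing.
    options.set('sentry:last_worker_ping', time())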
Example #22
def log_entry(entry, logger=logger):
    """
    Give an AuditLogEntry object to the audit logger.
    """
    fmt = options.get('system.logging-format')
    if fmt == 'human':
        log(
            u'[Audit Log] [{org}] {user} {note}'.format(
                org=entry.organization_id,
                user=entry.actor_label,
                note=entry.get_note(),
            ),
            logger=logger,
        )
    elif fmt == 'machine':
        log(
            dict(
                organization_id=entry.organization_id,
                actor_id=entry.actor_id,
                actor_key=entry.actor_key,
                target_object=entry.target_object,
                target_user_id=entry.target_user_id,
                event=entry.get_event_display(),
                ip_address=entry.ip_address,
                data=entry.data,
                datetime=entry.datetime,
            ),
            logger=logger,
        )
Example #23
    def send_confirm_email_singular(self, email, is_new_user=False):
        from sentry import options
        from sentry.utils.email import MessageBuilder

        if not email.hash_is_valid():
            email.set_hash()
            email.save()

        context = {
            'user': self,
            'url': absolute_uri(
                reverse('sentry-account-confirm-email', args=[self.id, email.validation_hash])
            ),
            'confirm_email': email.email,
            'is_new_user': is_new_user,
        }
        msg = MessageBuilder(
            subject='%sConfirm Email' % (options.get('mail.subject-prefix'), ),
            template='sentry/emails/confirm_email.txt',
            html_template='sentry/emails/confirm_email.html',
            type='user.confirm_email',
            context=context,
        )
        msg.send_async([email.email])
Example #24
    def get_form(self, request, project):
        organization = project.organization
        team_list = [
            t for t in Team.objects.get_for_user(
                organization=organization,
                user=request.user,
            )
            if request.access.has_team_scope(t, self.required_scope)
        ]

        # TODO(dcramer): this update should happen within a lock
        security_token = project.get_option('sentry:token', None)
        if security_token is None:
            security_token = uuid1().hex
            project.update_option('sentry:token', security_token)

        return EditProjectForm(
            request, organization, team_list, request.POST or None,
            instance=project,
            initial={
                'origins': '\n'.join(project.get_option('sentry:origins', ['*'])),
                'token': security_token,
                'resolve_age': int(project.get_option('sentry:resolve_age', 0)),
                'scrub_data': bool(project.get_option('sentry:scrub_data', True)),
                'scrub_defaults': bool(project.get_option('sentry:scrub_defaults', True)),
                'sensitive_fields': '\n'.join(project.get_option('sentry:sensitive_fields', None) or []),
                'safe_fields': '\n'.join(project.get_option('sentry:safe_fields', None) or []),
                'scrub_ip_address': bool(project.get_option('sentry:scrub_ip_address', False)),
                'scrape_javascript': bool(project.get_option('sentry:scrape_javascript', True)),
                'blacklisted_ips': '\n'.join(project.get_option('sentry:blacklisted_ips', [])),
                'default_environment': project.get_option('sentry:default_environment'),
                'mail_subject_prefix': project.get_option(
                    'mail:subject_prefix', options.get('mail.subject-prefix')),
            },
        )
Example #25
    def test_simple(self, mock_sync_integration_docs):
        responses.add("GET", "https://docs.getsentry.com/hosted/_platforms/_index.json", body=INDEX_JSON)

        sync_docs()

        data = options.get("sentry:docs")
        assert data == {
            "platforms": [
                {
                    "id": "go",
                    "integrations": [
                        {
                            "id": "go",
                            "link": "https://docs.getsentry.com/hosted/clients/go/",
                            "name": "Go",
                            "type": "language",
                        },
                        {"id": "go-http", "link": None, "name": "net/http", "type": "framework"},
                    ],
                    "name": "Go",
                }
            ]
        }

        assert mock_sync_integration_docs.mock_calls == [
            mock.call.delay("go", "_self", "go.json"),
            mock.call.delay("go", "http", "go/http.json"),
        ]
Example #26
    def test_integration(self):
        responses.add("GET", "https://docs.getsentry.com/hosted/_platforms/go/http.json", body=GO_HTTP_JSON)

        sync_integration_docs("go", "http", "go/http.json")

        data = options.get("sentry:docs:go-http")
        assert data == {"id": "go-http", "html": "foo bar", "link": None, "name": "net/http"}
Example #27
    def get_organization_quota(self, organization):
        from sentry.models import OrganizationOption

        account_limit = int(OrganizationOption.objects.get_value(
            organization=organization,
            key='sentry:account-rate-limit',
            default=0,
        ))

        system_limit = options.get('system.rate-limit')

        # If there is only a single org, this one org should
        # be allowed to consume the entire quota.
        if settings.SENTRY_SINGLE_ORGANIZATION:
            if system_limit < account_limit:
                return (system_limit, 60)
            return (account_limit, 3600)

        # an account limit is enforced, which is set as a fixed value and cannot
        # utilize percentage based limits
        elif account_limit:
            return (account_limit, 3600)

        return (self.translate_quota(
            settings.SENTRY_DEFAULT_MAX_EVENTS_PER_MINUTE,
            system_limit,
        ), 60)
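Worked numbers for the single-organization branch (values hypothetical): the smaller of the two limits wins, each keeping its own window.

system_limit = 100      # events per 60s, hypothetical 'system.rate-limit'
account_limit = 50000   # events per 3600s, hypothetical org option
quota = (system_limit, 60) if system_limit < account_limit else (account_limit, 3600)
assert quota == (100, 60)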
Example #28
    def check(self):
        # There is no queue, and celery is not running, so never show error
        if settings.CELERY_ALWAYS_EAGER:
            return []
        last_ping = options.get('sentry:last_worker_ping') or 0
        if last_ping >= time() - 300:
            return []

        backlogged, size = None, 0
        from sentry.monitoring.queues import backend
        if backend is not None:
            size = backend.get_size('default')
            backlogged = size > 0

        message = "Background workers haven't checked in recently. "
        if backlogged:
            message += "It seems that you have a backlog of %d tasks. Either your workers aren't running or you need more capacity." % size
        else:
            message += "This is likely an issue with your configuration or the workers aren't running."

        return [
            Problem(
                message,
                url=absolute_uri('/manage/queue/'),
            ),
        ]
Example #29
    def post(self, request):
        token = request.POST['token']
        signature = request.POST['signature']
        timestamp = request.POST['timestamp']

        key = options.get('mail.mailgun-api-key')
        if not key:
            logging.error('mail.mailgun-api-key is not set')
            return HttpResponse(status=500)

        if not self.verify(key, token, timestamp, signature):
            logging.info('Unable to verify signature for mailgun request')
            return HttpResponse(status=403)

        to_email = parseaddr(request.POST['To'])[1]
        from_email = parseaddr(request.POST['From'])[1]

        try:
            group_id = email_to_group_id(to_email)
        except Exception:
            logging.info('%r is not a valid email address', to_email)
            return HttpResponse(status=500)

        payload = EmailReplyParser.parse_reply(request.POST['body-plain']).strip()
        if not payload:
            # If there's no body, we don't need to go any further
            return HttpResponse(status=200)

        process_inbound_email.delay(from_email, group_id, payload)

        return HttpResponse(status=201)
Example #30
def get_install_id():
    from sentry import options
    install_id = options.get('sentry:install-id')
    if not install_id:
        install_id = sha1(uuid4().bytes).hexdigest()
        options.set('sentry:install-id', install_id)
    return install_id
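The id is generated lazily on first use and is stable afterwards. A small usage sketch (assuming a configured install):

install_id = get_install_id()
assert install_id == get_install_id()  # stable across calls
assert len(install_id) == 40  # sha1 hexdigest length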
Example #31
def up(project, exclude):
    "Run/update dependent services."
    os.environ['SENTRY_SKIP_BACKEND_VALIDATION'] = '1'

    from sentry.runner import configure
    configure()

    from django.conf import settings
    from sentry import options as sentry_options

    import docker
    client = get_docker_client()

    # This is brittle, but it is currently the best way to limit
    # which services run when they're not needed.
    if not exclude:
        exclude = set()

    if 'bigtable' not in settings.SENTRY_NODESTORE:
        exclude |= {'bigtable'}

    if 'memcached' not in settings.CACHES.get('default', {}).get('BACKEND', ''):
        exclude |= {'memcached'}

    if 'kafka' in settings.SENTRY_EVENTSTREAM:
        pass
    elif 'snuba' in settings.SENTRY_EVENTSTREAM:
        click.secho(
            '! Skipping kafka and zookeeper since your eventstream backend does not require it',
            err=True,
            fg='cyan')
        exclude |= {'kafka', 'zookeeper'}
    else:
        click.secho(
            '! Skipping kafka, zookeeper, snuba, and clickhouse since your eventstream backend does not require it',
            err=True,
            fg='cyan')
        exclude |= {'kafka', 'zookeeper', 'snuba', 'clickhouse'}

    if not sentry_options.get('symbolicator.enabled'):
        exclude |= {'symbolicator'}

    get_or_create(client, 'network', project)

    containers = {}
    for name, options in settings.SENTRY_DEVSERVICES.items():
        if name in exclude:
            continue
        options = options.copy()
        options['network'] = project
        options['detach'] = True
        options['name'] = project + '_' + name
        options.setdefault('ports', {})
        options.setdefault('environment', {})
        options.setdefault('restart_policy', {'Name': 'on-failure'})
        options['ports'] = ensure_interface(options['ports'])
        containers[name] = options

    pulled = set()
    for name, options in containers.items():
        # HACK(mattrobenolt): special-case the snuba backend because it needs
        # to handle different values based on the eventstream backend.
        # For snuba, we can't run the full devserver suite; we can only
        # run the api.
        if name == 'snuba' and 'snuba' in settings.SENTRY_EVENTSTREAM:
            options['environment'].pop('DEFAULT_BROKERS', None)
            options['command'] = ['devserver', '--no-workers']

        for key, value in options['environment'].items():
            options['environment'][key] = value.format(containers=containers)
        if options.pop('pull', False) and options['image'] not in pulled:
            click.secho("> Pulling image '%s'" % options['image'], err=True, fg='green')
            client.images.pull(options['image'])
            pulled.add(options['image'])
        for mount in options.get('volumes', {}).keys():
            if '/' not in mount:
                get_or_create(client, 'volume', project + '_' + mount)
                options['volumes'][project + '_' + mount] = options['volumes'].pop(mount)
        try:
            container = client.containers.get(options['name'])
        except docker.errors.NotFound:
            pass
        else:
            container.stop()
            container.remove()
        listening = ''
        if options['ports']:
            listening = ' (listening: %s)' % ', '.join(map(text_type, options['ports'].values()))
        click.secho("> Creating '%s' container%s" %
                    (options['name'], listening), err=True, fg='yellow')
        client.containers.run(**options)
Example #32
def _do_process_event(
    cache_key,
    start_time,
    event_id,
    process_task,
    data=None,
    data_has_changed=None,
    from_symbolicate=False,
):
    from sentry.plugins.base import plugins

    if data is None:
        data = event_processing_store.get(cache_key)

    if data is None:
        metrics.incr("events.failed",
                     tags={
                         "reason": "cache",
                         "stage": "process"
                     },
                     skip_internal=False)
        error_logger.error("process.failed.empty",
                           extra={"cache_key": cache_key})
        return

    data = CanonicalKeyDict(data)

    project_id = data["project"]
    set_current_project(project_id)

    event_id = data["event_id"]

    with sentry_sdk.start_span(
            op="tasks.store.process_event.get_project_from_cache"):
        project = Project.objects.get_from_cache(id=project_id)

    with metrics.timer(
            "tasks.store.process_event.organization.get_from_cache"):
        project._organization_cache = Organization.objects.get_from_cache(
            id=project.organization_id)

    has_changed = bool(data_has_changed)

    with sentry_sdk.start_span(
            op="tasks.store.process_event.get_reprocessing_revision"):
        # Fetch the reprocessing revision
        reprocessing_rev = reprocessing.get_reprocessing_revision(project_id)

    # Stacktrace based event processors.
    with sentry_sdk.start_span(op="task.store.process_event.stacktraces"):
        with metrics.timer("tasks.store.process_event.stacktraces",
                           tags={"from_symbolicate": from_symbolicate}):
            new_data = process_stacktraces(data)

    if new_data is not None:
        has_changed = True
        data = new_data

    # Second round of datascrubbing after stacktrace and language-specific
    # processing. First round happened as part of ingest.
    #
    # *Right now* the only sensitive data that is added in stacktrace
    # processing are usernames in filepaths, so we run directly after
    # stacktrace processors.
    #
    # We do not yet want to deal with context data produced by plugins like
    # sessionstack or fullstory (which are in `get_event_preprocessors`), as
    # this data is very unlikely to be sensitive data. This is why scrubbing
    # happens somewhere in the middle of the pipeline.
    #
    # On the other hand, Javascript event error translation is happening after
    # this block because it uses `get_event_preprocessors` instead of
    # `get_event_enhancers`.
    #
    # We are fairly confident, however, that this should run *before*
    # re-normalization as it is hard to find sensitive data in partially
    # trimmed strings.
    if has_changed and options.get("processing.can-use-scrubbers"):
        with sentry_sdk.start_span(op="task.store.datascrubbers.scrub"):
            with metrics.timer("tasks.store.datascrubbers.scrub",
                               tags={"from_symbolicate": from_symbolicate}):
                new_data = safe_execute(scrub_data,
                                        project=project,
                                        event=data.data)

                # XXX(markus): When datascrubbing is finally "totally stable", we might want
                # to drop the event if it crashes to avoid saving PII
                if new_data is not None:
                    data.data = new_data

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        with sentry_sdk.start_span(
                op="task.store.process_event.preprocessors") as span:
            span.set_data("plugin", plugin.slug)
            span.set_data("from_symbolicate", from_symbolicate)
            with metrics.timer(
                    "tasks.store.process_event.preprocessors",
                    tags={
                        "plugin": plugin.slug,
                        "from_symbolicate": from_symbolicate
                    },
            ):
                processors = safe_execute(plugin.get_event_preprocessors,
                                          data=data,
                                          _with_transaction=False)
                for processor in processors or ():
                    try:
                        result = processor(data)
                    except Exception:
                        error_logger.exception(
                            "tasks.store.preprocessors.error")
                        data.setdefault("_metrics",
                                        {})["flag.processing.error"] = True
                        has_changed = True
                    else:
                        if result:
                            data = result
                            has_changed = True

    assert data["project"] == project_id, "Project cannot be mutated by plugins"

    # We cannot persist canonical types in the cache, so we need to
    # downgrade this.
    if isinstance(data, CANONICAL_TYPES):
        data = dict(data.items())

    if has_changed:
        # Run some of normalization again such that we don't:
        # - persist e.g. incredibly large stacktraces from minidumps
        # - store event timestamps that are older than our retention window
        #   (also happening with minidumps)
        normalizer = StoreNormalizer(remove_other=False,
                                     is_renormalize=True,
                                     **DEFAULT_STORE_NORMALIZER_ARGS)
        data = normalizer.normalize_event(dict(data))

        issues = data.get("processing_issues")

        try:
            if issues and create_failed_event(
                    cache_key,
                    data,
                    project_id,
                    list(issues.values()),
                    event_id=event_id,
                    start_time=start_time,
                    reprocessing_rev=reprocessing_rev,
            ):
                return
        except RetryProcessing:
            # If `create_failed_event` indicates that we need to retry we
            # invoke ourselves again.  This happens when the reprocessing
            # revision changed while we were processing.
            _do_preprocess_event(cache_key, data, start_time, event_id,
                                 process_task, project)
            return

        cache_key = event_processing_store.store(data)

    from_reprocessing = process_task is process_event_from_reprocessing
    submit_save_event(project, from_reprocessing, cache_key, event_id,
                      start_time, data)
Example #33
from django.conf import settings

from sentry import options
from sentry.utils.services import LazyServiceWrapper

from .base import Analytics  # NOQA
from .event import *  # NOQA
from .event_manager import default_manager


def get_backend_path(backend):
    try:
        backend = settings.SENTRY_ANALYTICS_ALIASES[backend]
    except KeyError:
        pass
    return backend


backend = LazyServiceWrapper(
    Analytics, get_backend_path(options.get("analytics.backend")),
    options.get("analytics.options"))
backend.expose(locals())

register = default_manager.register
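Because expose(locals()) re-exports the wrapped backend's methods at module level, callers use the module directly. A hedged sketch; the event name and arguments are illustrative only:

from sentry import analytics

analytics.record("user.created", user_id=1)  # hypothetical event and payload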
Example #34
def fetch_release_archive_for_url(release, dist, url) -> Optional[IO]:
    """Fetch release archive and cache if possible.

    Multiple archives might have been uploaded, so we need the URL
    to get the correct archive from the artifact index.

    If return value is not empty, the caller is responsible for closing the stream.
    """
    with sentry_sdk.start_span(
            op="fetch_release_archive_for_url.get_index_entry"):
        info = get_index_entry(release, dist, url)
    if info is None:
        # Cannot write negative cache entry here because ID of release archive
        # is not yet known
        return None

    archive_ident = info["archive_ident"]

    # TODO(jjbayer): Could already extract filename from info and return
    # it later

    cache_key = get_release_file_cache_key(release_id=release.id,
                                           releasefile_ident=archive_ident)

    result = cache.get(cache_key)

    if result == -1:
        return None
    elif result:
        return BytesIO(result)
    else:
        try:
            with sentry_sdk.start_span(
                    op="fetch_release_archive_for_url.get_releasefile_db_entry"
            ):
                qs = ReleaseFile.objects.filter(
                    release_id=release.id,
                    dist_id=dist.id if dist else dist,
                    ident=archive_ident).select_related("file")
                releasefile = qs[0]
        except IndexError:
            # This should not happen when there is an archive_ident in the manifest
            logger.error("sourcemaps.missing_archive", exc_info=sys.exc_info())
            # Cache as nonexistent:
            cache.set(cache_key, -1, 60)
            return None
        else:
            try:
                with sentry_sdk.start_span(
                        op="fetch_release_archive_for_url.fetch_releasefile"):
                    if releasefile.file.size <= options.get(
                            "releasefile.cache-max-archive-size"):
                        getfile = lambda: ReleaseFile.cache.getfile(releasefile)
                    else:
                        # For very large ZIP archives, pulling the entire file into cache takes too long.
                        # Only the blobs required to extract the current artifact (central directory and the file entry itself)
                        # should be loaded in this case.
                        getfile = releasefile.file.getfile

                    file_ = fetch_retry_policy(getfile)
            except Exception:
                logger.error("sourcemaps.read_archive_failed",
                             exc_info=sys.exc_info())

                return None

            # `cache.set` will only keep values up to a certain size,
            # so we should not read the entire file if it's too large for caching
            if CACHE_MAX_VALUE_SIZE is not None and file_.size > CACHE_MAX_VALUE_SIZE:

                return file_

            with sentry_sdk.start_span(
                    op="fetch_release_archive_for_url.read_for_caching"
            ) as span:
                span.set_data("file_size", file_.size)
                contents = file_.read()
            with sentry_sdk.start_span(
                    op="fetch_release_archive_for_url.write_to_cache") as span:
                span.set_data("file_size", len(contents))
                cache.set(cache_key, contents, 3600)

            file_.seek(0)

            return file_
Example #35
def get_facets(query, params, limit=10, referrer=None):
    """
    High-level API for getting 'facet map' results.

    Facets are high-frequency tags and attribute results that
    can be used to further refine user queries. When many projects
    are requested, sampling is enabled to help keep response times low.

    query (str) Filter query string to create conditions from.
    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment
    limit (int) The number of records to fetch.
    referrer (str|None) A referrer string to help locate the origin of this query.

    Returns Sequence[FacetResult]
    """
    with sentry_sdk.start_span(
        op="discover.discover", description="facets.filter_transform"
    ) as span:
        span.set_data("query", query)
        snuba_filter = get_filter(query, params)

        # Resolve the public aliases into the discover dataset names.
        snuba_filter, translated_columns = resolve_discover_aliases(snuba_filter)

    # Exclude tracing tags as they are noisy and generally not helpful.
    # TODO(markus): Tracing tags are no longer written but may still reside in DB.
    excluded_tags = ["tags_key", "NOT IN", ["trace", "trace.ctx", "trace.span", "project"]]

    # Sampling keys for multi-project results as we don't need accuracy
    # with that much data.
    sample = len(snuba_filter.filter_keys["project_id"]) > 2

    with sentry_sdk.start_span(op="discover.discover", description="facets.frequent_tags"):
        # Get the most frequent tag keys
        key_names = raw_query(
            aggregations=[["count", None, "count"]],
            start=snuba_filter.start,
            end=snuba_filter.end,
            conditions=snuba_filter.conditions,
            filter_keys=snuba_filter.filter_keys,
            orderby=["-count", "tags_key"],
            groupby="tags_key",
            having=[excluded_tags],
            dataset=Dataset.Discover,
            limit=limit,
            referrer=referrer,
            turbo=sample,
        )
        top_tags = [r["tags_key"] for r in key_names["data"]]
        if not top_tags:
            return []

    # TODO(mark) Make the sampling rate scale based on the result size and scaling factor in
    # sentry.options. To test the lowest acceptable sampling rate, we use 0.1 which
    # is equivalent to turbo. We don't use turbo though as we need to re-scale data, and
    # using turbo could cause results to be wrong if the value of turbo is changed in snuba.
    sampling_enabled = options.get("discover2.tags_facet_enable_sampling")
    sample_rate = 0.1 if (sampling_enabled and key_names["data"][0]["count"] > 10000) else None
    # Rescale the results if we're sampling
    multiplier = 1 / sample_rate if sample_rate is not None else 1

    fetch_projects = False
    if len(params.get("project_id", [])) > 1:
        if len(top_tags) == limit:
            top_tags.pop()
        fetch_projects = True

    results = []
    if fetch_projects:
        with sentry_sdk.start_span(op="discover.discover", description="facets.projects"):
            project_values = raw_query(
                aggregations=[["count", None, "count"]],
                start=snuba_filter.start,
                end=snuba_filter.end,
                conditions=snuba_filter.conditions,
                filter_keys=snuba_filter.filter_keys,
                groupby="project_id",
                orderby="-count",
                dataset=Dataset.Discover,
                referrer=referrer,
                sample=sample_rate,
                # Ensures Snuba will not apply FINAL
                turbo=sample_rate is not None,
            )
            results.extend(
                [
                    FacetResult("project", r["project_id"], int(r["count"]) * multiplier)
                    for r in project_values["data"]
                ]
            )

    # Get tag counts for our top tags. Fetching them individually
    # allows snuba to leverage promoted tags better and enables us to get
    # the value count we want.
    max_aggregate_tags = options.get("discover2.max_tags_to_combine")
    individual_tags = []
    aggregate_tags = []
    for i, tag in enumerate(top_tags):
        if tag == "environment":
            # Add here tags that you want to be individual
            individual_tags.append(tag)
        elif i >= len(top_tags) - max_aggregate_tags:
            aggregate_tags.append(tag)
        else:
            individual_tags.append(tag)

    with sentry_sdk.start_span(
        op="discover.discover", description="facets.individual_tags"
    ) as span:
        span.set_data("tag_count", len(individual_tags))
        for tag_name in individual_tags:
            tag = f"tags[{tag_name}]"
            tag_values = raw_query(
                aggregations=[["count", None, "count"]],
                conditions=snuba_filter.conditions,
                start=snuba_filter.start,
                end=snuba_filter.end,
                filter_keys=snuba_filter.filter_keys,
                orderby=["-count"],
                groupby=[tag],
                limit=TOP_VALUES_DEFAULT_LIMIT,
                dataset=Dataset.Discover,
                referrer=referrer,
                sample=sample_rate,
                # Ensures Snuba will not apply FINAL
                turbo=sample_rate is not None,
            )
            results.extend(
                [
                    FacetResult(tag_name, r[tag], int(r["count"]) * multiplier)
                    for r in tag_values["data"]
                ]
            )

    if aggregate_tags:
        with sentry_sdk.start_span(op="discover.discover", description="facets.aggregate_tags"):
            conditions = snuba_filter.conditions
            conditions.append(["tags_key", "IN", aggregate_tags])
            tag_values = raw_query(
                aggregations=[["count", None, "count"]],
                conditions=conditions,
                start=snuba_filter.start,
                end=snuba_filter.end,
                filter_keys=snuba_filter.filter_keys,
                orderby=["tags_key", "-count"],
                groupby=["tags_key", "tags_value"],
                dataset=Dataset.Discover,
                referrer=referrer,
                sample=sample_rate,
                # Ensures Snuba will not apply FINAL
                turbo=sample_rate is not None,
                limitby=[TOP_VALUES_DEFAULT_LIMIT, "tags_key"],
            )
            results.extend(
                [
                    FacetResult(r["tags_key"], r["tags_value"], int(r["count"]) * multiplier)
                    for r in tag_values["data"]
                ]
            )

    return results
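Worked numbers for the rescaling above: at a 0.1 sample rate the multiplier is 10, so sampled counts are scaled back up before being returned.

sample_rate = 0.1
multiplier = 1 / sample_rate if sample_rate is not None else 1
assert int(523 * multiplier) == 5230  # a sampled count of 523 reports as 5230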
Example #36
    def run(self) -> None:
        logger.debug("Starting snuba query subscriber")
        self.offsets.clear()

        def on_assign(consumer: Consumer,
                      partitions: List[TopicPartition]) -> None:
            updated_partitions: List[TopicPartition] = []
            for partition in partitions:
                if self.resolve_partition_force_offset:
                    partition = self.resolve_partition_force_offset(partition)
                    updated_partitions.append(partition)

                if partition.offset == OFFSET_INVALID:
                    updated_offset = None
                else:
                    updated_offset = partition.offset
                self.offsets[partition.partition] = updated_offset
            if updated_partitions:
                self.consumer.assign(updated_partitions)
            logger.info(
                "query-subscription-consumer.on_assign",
                extra={
                    "offsets": str(self.offsets),
                    "partitions": str(partitions),
                },
            )

        def on_revoke(consumer: Consumer,
                      partitions: List[TopicPartition]) -> None:
            partition_numbers = [
                partition.partition for partition in partitions
            ]
            self.commit_offsets(partition_numbers)
            for partition_number in partition_numbers:
                self.offsets.pop(partition_number, None)
            logger.info(
                "query-subscription-consumer.on_revoke",
                extra={
                    "offsets": str(self.offsets),
                    "partitions": str(partitions),
                },
            )

        self.consumer = Consumer(self.cluster_options)
        self.__shutdown_requested = False

        if settings.KAFKA_CONSUMER_AUTO_CREATE_TOPICS:
            # This is required for confluent-kafka>=1.5.0, otherwise the topics will
            # not be automatically created.
            admin_client = AdminClient(self.admin_cluster_options)
            wait_for_topics(admin_client, [self.topic])

        self.consumer.subscribe([self.topic],
                                on_assign=on_assign,
                                on_revoke=on_revoke)

        i = 0
        while not self.__shutdown_requested:
            message = self.consumer.poll(0.1)
            if message is None:
                continue

            error = message.error()
            if error is not None:
                raise KafkaException(error)

            i = i + 1

            with sentry_sdk.start_transaction(
                    op="handle_message",
                    name="query_subscription_consumer_process_message",
                    sampled=random() <=
                    options.get("subscriptions-query.sample-rate"),
            ), metrics.timer("snuba_query_subscriber.handle_message"):
                self.handle_message(message)

            # Track latest completed message here, for use in `shutdown` handler.
            self.offsets[message.partition()] = message.offset() + 1

            if i % self.commit_batch_size == 0:
                logger.debug("Committing offsets")
                self.commit_offsets()

        logger.debug("Committing offsets and closing consumer")
        self.commit_offsets()
        self.consumer.close()
Example #37
    def _build_subject_prefix(self, project):
        subject_prefix = ProjectOption.objects.get_value(project, self.mail_option_key, None)
        if not subject_prefix:
            subject_prefix = options.get("mail.subject-prefix")
        return force_text(subject_prefix)
Example #38
    def get_secret(self):
        return options.get("github-app.webhook-secret")
Example #39
def absolute_uri(url: Optional[str] = None) -> str:
    prefix = options.get("system.url-prefix")
    if not url:
        return prefix
    return urljoin(prefix.rstrip("/") + "/", url.lstrip("/"))
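Join semantics in one line (the prefix value is hypothetical): exactly one slash ends up between the prefix and the path, and a missing url returns the prefix as-is.

from urllib.parse import urljoin

prefix = "https://sentry.example.com"  # hypothetical 'system.url-prefix'
assert urljoin(prefix.rstrip("/") + "/", "organizations/acme".lstrip("/")) == \
    "https://sentry.example.com/organizations/acme"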
Example #40
def send_beacon():
    """
    Send a Beacon to a remote server operated by the Sentry team.

    See the documentation for more details.
    """
    from sentry import options
    from sentry.models import Broadcast, Organization, Project, Team, User

    install_id = options.get("sentry:install-id")
    if not install_id:
        install_id = sha1(uuid4().bytes).hexdigest()
        logger.info("beacon.generated-install-id", extra={"install_id": install_id})
        options.set("sentry:install-id", install_id)

    if not settings.SENTRY_BEACON:
        logger.info("beacon.skipped", extra={"install_id": install_id, "reason": "disabled"})
        return

    if settings.DEBUG:
        logger.info("beacon.skipped", extra={"install_id": install_id, "reason": "debug"})
        return

    end = timezone.now()
    events_24h = tsdb.get_sums(
        model=tsdb.models.internal, keys=["events.total"], start=end - timedelta(hours=24), end=end
    )["events.total"]

    # This must be explicitly configured. It defaults to None, which a plain
    # truthiness check could not distinguish from False, hence the identity
    # comparison below.
    anonymous = options.get("beacon.anonymous") is not False

    payload = {
        "install_id": install_id,
        "version": sentry.get_version(),
        "docker": sentry.is_docker(),
        "data": {
            # TODO(dcramer): we'd also like to get an idea about the throughput
            # of the system (i.e. events in 24h)
            "users": User.objects.count(),
            "projects": Project.objects.count(),
            "teams": Team.objects.count(),
            "organizations": Organization.objects.count(),
            "events.24h": events_24h,
        },
        "packages": get_all_package_versions(),
        "anonymous": anonymous,
    }

    if not anonymous:
        payload["admin_email"] = options.get("system.admin-email")

    # TODO(dcramer): relay the response 'notices' as admin broadcasts
    try:
        request = safe_urlopen(BEACON_URL, json=payload, timeout=5)
        response = safe_urlread(request)
    except Exception:
        logger.warning("beacon.failed", exc_info=True, extra={"install_id": install_id})
        return
    else:
        logger.info("beacon.sent", extra={"install_id": install_id})

    data = json.loads(response)

    if "version" in data:
        options.set("sentry:latest_version", data["version"]["stable"])

    if "notices" in data:
        upstream_ids = set()
        for notice in data["notices"]:
            upstream_ids.add(notice["id"])
            defaults = {
                "title": notice["title"],
                "link": notice.get("link"),
                "message": notice["message"],
            }
            # XXX(dcramer): we're missing a unique constraint on upstream_id
            # so we're using a lock to work around that. In the future we'd like
            # to have a data migration to clean up the duplicates and add the constraint
            lock = locks.get(u"broadcasts:{}".format(notice["id"]), duration=60)
            with lock.acquire():
                affected = Broadcast.objects.filter(upstream_id=notice["id"]).update(**defaults)
                if not affected:
                    Broadcast.objects.create(upstream_id=notice["id"], **defaults)

        Broadcast.objects.filter(upstream_id__isnull=False).exclude(
            upstream_id__in=upstream_ids
        ).update(is_active=False)
Example #41
    def post(self, request):
        """
        Requests to Register a Relay
        ````````````````````````````

        Registers the relay with the sentry installation.  If a relay boots
        it will always attempt to invoke this endpoint.
        """
        try:
            json_data = json.loads(request.body)
        except ValueError:
            return Response({"detail": "No valid json body"}, status=status.HTTP_400_BAD_REQUEST)

        serializer = RelayRegisterChallengeSerializer(data=json_data)

        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        if not is_version_supported(json_data.get("version")):
            return Response(
                {
                    "detail": "Relay version no longer supported, please upgrade to a more recent version"
                },
                status=status.HTTP_403_FORBIDDEN,
            )

        public_key = json_data.get("public_key")
        if not public_key:
            return Response({"detail": "Missing public key"}, status=status.HTTP_400_FORBIDDEN)

        if not settings.SENTRY_RELAY_OPEN_REGISTRATION and not is_internal_relay(
            request, public_key
        ):
            return Response(
                {"detail": "Relay is not allowed to register"}, status=status.HTTP_403_FORBIDDEN
            )

        sig = get_header_relay_signature(request)
        if not sig:
            return Response(
                {"detail": "Missing relay signature"}, status=status.HTTP_400_BAD_REQUEST
            )

        secret = options.get("system.secret-key")

        try:
            challenge = create_register_challenge(request.body, sig, secret)
        except Exception as exc:
            return Response(
                {"detail": str(exc).splitlines()[0]}, status=status.HTTP_400_BAD_REQUEST
            )

        relay_id = six.text_type(challenge["relay_id"])
        if relay_id != get_header_relay_id(request):
            return Response(
                {"detail": "relay_id in payload did not match header"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            relay = Relay.objects.get(relay_id=relay_id)
        except Relay.DoesNotExist:
            pass
        else:
            if relay.public_key != six.text_type(public_key):
                # This happens if we have an ID collision or someone copies an existing id
                return Response(
                    {"detail": "Attempted to register agent with a different public key"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

        return Response(serialize(challenge))
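
For context, a relay satisfies this endpoint with a signed POST. Everything in the sketch below (the URL path, the header names, and the sign() stub) is an assumption about the client side, not something taken from the handler above:

import json

import requests

relay_id = "00000000-0000-0000-0000-000000000000"  # placeholder
public_key = "<relay public key>"  # placeholder


def sign(payload):
    # Placeholder: a real relay signs the body with its private key.
    return "<signature over payload>"


body = json.dumps({"relay_id": relay_id, "public_key": public_key, "version": "20.6.0"})
response = requests.post(
    "https://sentry.example.com/api/0/relays/register/challenge/",
    data=body,
    headers={
        "Content-Type": "application/json",
        "X-Sentry-Relay-Id": relay_id,
        "X-Sentry-Relay-Signature": sign(body),
    },
)
challenge = response.json()  # echoes relay_id plus a token to sign back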
Example No. 42
    def post(self, request):
        """
        Registers a Relay
        `````````````````

        Registers the relay with the Sentry installation. A relay always
        attempts to invoke this endpoint when it boots.
        """

        try:
            json_data = json.loads(request.body)
        except ValueError:
            return Response({"detail": "No valid json body"}, status=status.HTTP_400_BAD_REQUEST)

        serializer = RelayRegisterResponseSerializer(data=json_data)

        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        sig = get_header_relay_signature(request)
        if not sig:
            return Response(
                {"detail": "Missing relay signature"}, status=status.HTTP_400_BAD_REQUEST
            )

        secret = options.get("system.secret-key")

        try:
            validated = validate_register_response(request.body, sig, secret)
        except UnpackErrorSignatureExpired:
            return Response({"detail": "Challenge expired"}, status=status.HTTP_401_UNAUTHORIZED)
        except Exception as exc:
            return Response(
                {"detail": str(exc).splitlines()[0]}, status=status.HTTP_400_BAD_REQUEST
            )

        relay_id = six.text_type(validated["relay_id"])
        version = six.text_type(validated["version"])
        public_key = validated["public_key"]

        if relay_id != get_header_relay_id(request):
            return Response(
                {"detail": "relay_id in payload did not match header"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        is_internal = is_internal_relay(request, public_key)
        try:
            relay = Relay.objects.get(relay_id=relay_id)
        except Relay.DoesNotExist:
            relay = Relay.objects.create(
                relay_id=relay_id, public_key=public_key, is_internal=is_internal
            )
        else:
            relay.is_internal = is_internal
            relay.save()

        try:
            relay_usage = RelayUsage.objects.get(relay_id=relay_id, version=version)
        except RelayUsage.DoesNotExist:
            RelayUsage.objects.create(relay_id=relay_id, version=version, public_key=public_key)
        else:
            relay_usage.last_seen = timezone.now()
            relay_usage.public_key = public_key
            relay_usage.save()

        return Response(serialize({"relay_id": relay.relay_id}))
Example No. 43
 def _authorize(self):
     if self.data.get('token') != options.get('slack.verification-token'):
         self._error('slack.action.invalid-token')
         raise SlackRequestError(status=401)
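
The equality check above works, but a constant-time comparison is a common hardening against timing side channels; this variant is an assumption on my part, not what the source does:

import hmac

def _authorize(self):
    # Sketch: same check as above, using hmac.compare_digest so the
    # comparison time does not leak how much of the token matched.
    expected = options.get('slack.verification-token') or ''
    supplied = self.data.get('token') or ''
    if not hmac.compare_digest(supplied, expected):
        self._error('slack.action.invalid-token')
        raise SlackRequestError(status=401)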
Example No. 44
 def dsym_cache_path(self):
     return options.get('dsym.cache-path')
Example No. 45
def get_performance_facets(
    query,
    params,
    orderby=None,
    aggregate_column="duration",
    aggregate_function="avg",
    limit=20,
    referrer=None,
):
    """
    High-level API for getting 'facet map' results for performance data

    Performance facets are high frequency tags and the aggregate duration of
    their most frequent values

    query (str) Filter query string to create conditions from.
    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment
    limit (int) The number of records to fetch.
    referrer (str|None) A referrer string to help locate the origin of this query.

    Returns Sequence[FacetResult]
    """
    with sentry_sdk.start_span(
        op="discover.discover", description="facets.filter_transform"
    ) as span:
        span.set_data("query", query)
        snuba_filter = get_filter(query, params)

        # Resolve the public aliases into the discover dataset names.
        snuba_filter, translated_columns = resolve_discover_aliases(snuba_filter)

    # Exclude tracing tags as they are noisy and generally not helpful.
    # TODO(markus): Tracing tags are no longer written but may still reside in DB.
    excluded_tags = ["tags_key", "NOT IN", ["trace", "trace.ctx", "trace.span", "project"]]

    # Sample the query for multi-project results, since we don't need
    # full accuracy with that much data.
    sample = len(snuba_filter.filter_keys["project_id"]) > 2

    with sentry_sdk.start_span(op="discover.discover", description="facets.frequent_tags"):
        # Get the tag keys with the highest deviation
        key_names = raw_query(
            aggregations=[["stddevSamp", aggregate_column, "stddev"]],
            start=snuba_filter.start,
            end=snuba_filter.end,
            conditions=snuba_filter.conditions,
            filter_keys=snuba_filter.filter_keys,
            orderby=["-stddev", "tags_key"],
            groupby="tags_key",
            # TODO(Kevan): Check using having vs where before mainlining
            having=[excluded_tags],
            dataset=Dataset.Discover,
            limit=limit,
            referrer=referrer,
            turbo=sample,
        )
        top_tags = [r["tags_key"] for r in key_names["data"]]
        if not top_tags:
            return []

    results = []

    sampling_enabled = True
    options_sample_rate = options.get("discover2.tags_performance_facet_sample_rate") or 0.1

    sample_rate = options_sample_rate if sampling_enabled else None

    # Keep only the last `max_aggregate_tags` of the ranked tag keys.
    max_aggregate_tags = 20
    aggregate_tags = top_tags[-max_aggregate_tags:]

    if orderby is None:
        orderby = []

    if aggregate_tags:
        with sentry_sdk.start_span(op="discover.discover", description="facets.aggregate_tags"):
            conditions = snuba_filter.conditions
            conditions.append(["tags_key", "IN", aggregate_tags])
            tag_values = raw_query(
                aggregations=[[aggregate_function, aggregate_column, "aggregate"]],
                conditions=conditions,
                start=snuba_filter.start,
                end=snuba_filter.end,
                filter_keys=snuba_filter.filter_keys,
                orderby=orderby + ["tags_key"],
                groupby=["tags_key", "tags_value"],
                dataset=Dataset.Discover,
                referrer=referrer,
                sample=sample_rate,
                turbo=sample_rate is not None,
                limitby=[TOP_VALUES_DEFAULT_LIMIT, "tags_key"],
            )
            results.extend(
                [
                    FacetResult(r["tags_key"], r["tags_value"], int(r["aggregate"]))
                    for r in tag_values["data"]
                ]
            )

    return results
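
A hypothetical call site follows, assuming params carries the usual start/end/project_id filters and that FacetResult exposes its three positional fields as key, value, and count; the query string and ids are illustrative:

from datetime import datetime, timedelta

# Illustrative values only; the project id and query string are made up.
params = {
    "start": datetime.utcnow() - timedelta(days=1),
    "end": datetime.utcnow(),
    "project_id": [1],
}
for facet in get_performance_facets(
    "transaction.duration:>100",
    params,
    limit=10,
    referrer="api.performance.facets",
):
    print(facet.key, facet.value, facet.count)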
Example No. 46
 def _get_config_id(self, project):
     return options.get("store.background-grouping-config-id")
Example No. 47
 def cache_path(self):
     return options.get("dsym.cache-path")
Example No. 48
 def get_client_id(self):
     return options.get("auth-google.client-id")
Example No. 49
 def get_client_secret(self):
     return options.get("auth-google.client-secret")
Example No. 50
def plugin_config(plugin, project, request):
    """
    Configure the plugin site wide.

    Returns a tuple composed of a redirection boolean and the content to
    be displayed.
    """
    NOTSET = object()

    plugin_key = plugin.get_conf_key()
    if project:
        form_class = plugin.project_conf_form
        template = plugin.project_conf_template
    else:
        form_class = plugin.site_conf_form
        template = plugin.site_conf_template

    test_results = None

    initials = plugin.get_form_initial(project)
    for field in form_class.base_fields:
        key = '%s:%s' % (plugin_key, field)
        if project:
            value = ProjectOption.objects.get_value(project, key, NOTSET)
        else:
            value = options.get(key)
        if value is not NOTSET:
            initials[field] = value

    form = form_class(request.POST or None,
                      initial=initials,
                      prefix=plugin_key)
    if form.is_valid():
        if 'action_test' in request.POST and plugin.is_testable():
            try:
                test_results = plugin.test_configuration(project)
            except Exception as exc:
                if hasattr(exc, 'read') and callable(exc.read):
                    test_results = '%s\n%s' % (exc, exc.read())
                else:
                    test_results = exc
            if test_results is None:
                test_results = 'No errors returned'
        else:
            for field, value in form.cleaned_data.iteritems():
                key = '%s:%s' % (plugin_key, field)
                if project:
                    ProjectOption.objects.set_value(project, key, value)
                else:
                    Option.objects.set_value(key, value)

            return ('redirect', None)

    # TODO(mattrobenolt): Reliably determine if a plugin is configured
    # if hasattr(plugin, 'is_configured'):
    #     is_configured = plugin.is_configured(project)
    # else:
    #     is_configured = True
    is_configured = True

    from django.template.loader import render_to_string
    return ('display',
            mark_safe(
                render_to_string(
                    template, {
                        'form': form,
                        'request': request,
                        'plugin': plugin,
                        'plugin_description': plugin.get_description() or '',
                        'plugin_test_results': test_results,
                        'plugin_is_configured': is_configured,
                    },
                    context_instance=RequestContext(request))))
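
A hypothetical caller dispatching on that (mode, content) tuple might look like the sketch below; the view name and redirect target are illustrative, not taken from the source:

from django.http import HttpResponse, HttpResponseRedirect


def configure_plugin(request, plugin, project=None):
    # Hypothetical wiring: redirect after a successful save, otherwise
    # render the configuration form returned by plugin_config().
    mode, content = plugin_config(plugin, project, request)
    if mode == 'redirect':
        return HttpResponseRedirect(request.path)
    return HttpResponse(content)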
Example No. 51
 def get_install_url(self):
     return options.get("github.apps-install-url")
Example No. 52
 def get_app_url(self):
     name = options.get('github-app.name')
     return 'https://github.com/apps/%s' % name
Example No. 53
 def has_apps_configured(self):
     return bool(
         options.get("github.apps-install-url")
         and options.get("github.integration-app-id")
         and options.get("github.integration-hook-secret")
         and options.get("github.integration-private-key"))
Example No. 54
 def get_maximum_quota(self, organization):
     """
     Return the maximum capable rate for an organization.
     """
     return (options.get('system.rate-limit'), 60)
Example No. 55
 def get_oauth_client_secret(self):
     return options.get("github-app.client-secret")
Example No. 56
 def get_idp_external_id(self, integration, host=None):
     return options.get("github-app.id")
Example No. 57
from __future__ import absolute_import

from django.conf import settings

from sentry import options
from sentry.utils.services import LazyServiceWrapper

from .base import Analytics  # NOQA
from .event_manager import default_manager
from .event import Attribute, Event  # NOQA


def get_backend_path(backend):
    try:
        backend = settings.SENTRY_ANALYTICS_ALIASES[backend]
    except KeyError:
        pass
    return backend


backend = LazyServiceWrapper(
    Analytics, get_backend_path(options.get('analytics.backend')),
    options.get('analytics.options'))
backend.expose(locals())

register = default_manager.register
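
Because backend.expose(locals()) re-exports the wrapped service's methods at module level, call sites can use the module directly. A sketch, where the event name and attributes are illustrative and must match a registered Event subclass:

# Hypothetical call site; 'user.created' and its attributes are illustrative.
from sentry import analytics

analytics.record("user.created", user_id=123)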
Example No. 58
 def _subject_prefix(self):
     if self.subject_prefix is not None:
         return self.subject_prefix
     return options.get('mail.subject-prefix')
Example No. 59
    def serialize(self, obj, attrs, user):
        from sentry.plugins import plugins

        data = super(DetailedProjectSerializer, self).serialize(obj, attrs, user)
        data.update({
            'latestRelease': attrs['latest_release'],
            'options': {
                'sentry:csp_ignored_sources_defaults': bool(
                    attrs['options'].get('sentry:csp_ignored_sources_defaults', True)),
                'sentry:csp_ignored_sources': '\n'.join(
                    attrs['options'].get('sentry:csp_ignored_sources', []) or []),
                'sentry:reprocessing_active': bool(
                    attrs['options'].get('sentry:reprocessing_active', False)),
                'filters:blacklisted_ips': '\n'.join(
                    attrs['options'].get('sentry:blacklisted_ips', [])),
                u'filters:{}'.format(FilterTypes.RELEASES): '\n'.join(
                    attrs['options'].get(u'sentry:{}'.format(FilterTypes.RELEASES), [])),
                u'filters:{}'.format(FilterTypes.ERROR_MESSAGES): '\n'.join(
                    attrs['options'].get(u'sentry:{}'.format(FilterTypes.ERROR_MESSAGES), [])),
                'feedback:branding': attrs['options'].get('feedback:branding', '1') == '1',
            },
            'digestsMinDelay': attrs['options'].get(
                'digests:mail:minimum_delay', digests.minimum_delay),
            'digestsMaxDelay': attrs['options'].get(
                'digests:mail:maximum_delay', digests.maximum_delay),
            'subjectPrefix': attrs['options'].get(
                'mail:subject_prefix', options.get('mail.subject-prefix')),
            'allowedDomains': attrs['options'].get('sentry:origins', ['*']),
            'resolveAge': int(attrs['options'].get('sentry:resolve_age', 0)),
            'dataScrubber': bool(attrs['options'].get('sentry:scrub_data', True)),
            'dataScrubberDefaults': bool(attrs['options'].get('sentry:scrub_defaults', True)),
            'safeFields': attrs['options'].get('sentry:safe_fields', []),
            'storeCrashReports': bool(attrs['options'].get('sentry:store_crash_reports', False)),
            'sensitiveFields': attrs['options'].get('sentry:sensitive_fields', []),
            'subjectTemplate': attrs['options'].get(
                'mail:subject_template') or DEFAULT_SUBJECT_TEMPLATE.template,
            'securityToken': attrs['options'].get('sentry:token') or obj.get_security_token(),
            'securityTokenHeader': attrs['options'].get('sentry:token_header'),
            'verifySSL': bool(attrs['options'].get('sentry:verify_ssl', False)),
            'scrubIPAddresses': bool(attrs['options'].get('sentry:scrub_ip_address', False)),
            'scrapeJavaScript': bool(attrs['options'].get('sentry:scrape_javascript', True)),
            'organization': attrs['org'],
            'plugins': serialize(
                [
                    plugin for plugin in plugins.configurable_for_project(obj, version=None)
                    if plugin.has_project_conf()
                ],
                user,
                PluginSerializer(obj),
            ),
            'platforms': attrs['platforms'],
            'processingIssues': attrs['processing_issues'],
            'defaultEnvironment': attrs['options'].get('sentry:default_environment'),
            'relayPiiConfig': attrs['options'].get('sentry:relay_pii_config'),
        })
        return data
Example No. 60
 def get_oauth_client_id(self):
     return options.get("github-app.client-id")