Example #1
    def should_filter(self, project, data, ip_address=None):
        # TODO(dcramer): read filters from options such as:
        # - ignore errors from spiders/bots
        # - ignore errors from legacy browsers
        if ip_address and not is_valid_ip(ip_address, project):
            return True

        return False
Example #2
    def should_filter(self, project, data, ip_address=None):
        # TODO(dcramer): read filters from options such as:
        # - ignore errors from spiders/bots
        # - ignore errors from legacy browsers
        if ip_address and not is_valid_ip(ip_address, project):
            return True

        for filter_cls in filters.all():
            filter_obj = filter_cls(project)
            if filter_obj.is_enabled() and filter_obj.test(data):
                return True

        return False
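
The loop above only assumes that each class returned by filters.all() accepts the project and exposes is_enabled() and test(data). A minimal, hypothetical filter conforming to that interface (the class name, option key, and event fields are illustrative, not Sentry's actual implementation) could look like this:

class LocalhostIPFilter(object):
    """Hypothetical filter that drops events reported from loopback addresses."""

    def __init__(self, project):
        self.project = project

    def is_enabled(self):
        # assumed to be toggled by a per-project option
        return bool(self.project.get_option('filters:localhost', False))

    def test(self, data):
        # returning True means "filter this event out"
        user = data.get('sentry.interfaces.User') or {}
        return user.get('ip_address') in ('127.0.0.1', '::1')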
Example #3
    def is_valid_ip(self, ip, inputs):
        self.project.update_option('sentry:blacklisted_ips', inputs)
        return is_valid_ip(ip, self.project)
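
This test drives is_valid_ip through the project option sentry:blacklisted_ips. A minimal sketch of that check, assuming the option holds a plain list of blocked addresses (the real implementation may also support CIDR ranges), would be:

def is_valid_ip(ip_address, project):
    # Sketch only: returns False when the address is on the project's blacklist.
    blacklist = project.get_option('sentry:blacklisted_ips') or []
    return ip_address not in blacklist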
Example #4
    def process(self, request, project, auth, helper, data, **kwargs):
        metrics.incr('events.total')

        remote_addr = request.META['REMOTE_ADDR']
        event_received.send_robust(ip=remote_addr, sender=type(self))

        if not is_valid_ip(remote_addr, project):
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.project_total_blacklisted, project.id),
                (app.tsdb.models.organization_total_received,
                 project.organization_id),
                (app.tsdb.models.organization_total_blacklisted,
                 project.organization_id),
            ])
            metrics.incr('events.blacklisted')
            raise APIForbidden('Blacklisted IP address: %s' % (remote_addr, ))

        # TODO: improve this API (e.g. make RateLimit act on __ne__)
        rate_limit = safe_execute(app.quotas.is_rate_limited,
                                  project=project,
                                  _with_transaction=False)
        if isinstance(rate_limit, bool):
            rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

        # XXX(dcramer): when the rate limiter fails we drop events to ensure
        # it cannot cascade
        if rate_limit is None or rate_limit.is_limited:
            if rate_limit is None:
                helper.log.debug(
                    'Dropped event due to error with rate limiter')
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.project_total_rejected, project.id),
                (app.tsdb.models.organization_total_received,
                 project.organization_id),
                (app.tsdb.models.organization_total_rejected,
                 project.organization_id),
            ])
            metrics.incr('events.dropped')
            if rate_limit is not None:
                raise APIRateLimited(rate_limit.retry_after)
        else:
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.organization_total_received,
                 project.organization_id),
            ])

        content_encoding = request.META.get('HTTP_CONTENT_ENCODING', '')

        if isinstance(data, basestring):
            if content_encoding == 'gzip':
                data = helper.decompress_gzip(data)
            elif content_encoding == 'deflate':
                data = helper.decompress_deflate(data)
            elif not data.startswith('{'):
                data = helper.decode_and_decompress_data(data)
            data = helper.safely_load_json_string(data)

        # mutates data
        data = helper.validate_data(project, data)

        # mutates data
        manager = EventManager(data, version=auth.version)
        data = manager.normalize()

        org_options = OrganizationOption.objects.get_all_values(
            project.organization_id)

        if org_options.get('sentry:require_scrub_ip_address', False):
            scrub_ip_address = True
        else:
            scrub_ip_address = project.get_option('sentry:scrub_ip_address',
                                                  False)

        # insert IP address if not available
        if auth.is_public and not scrub_ip_address:
            helper.ensure_has_ip(data, remote_addr)

        event_id = data['event_id']

        # TODO(dcramer): ideally we'd only validate this if the event_id was
        # supplied by the user
        cache_key = 'ev:%s:%s' % (
            project.id,
            event_id,
        )

        if cache.get(cache_key) is not None:
            raise APIForbidden(
                'An event with the same ID already exists (%s)' % (event_id, ))

        if org_options.get('sentry:require_scrub_data', False):
            scrub_data = True
        else:
            scrub_data = project.get_option('sentry:scrub_data', True)

        if scrub_data:
            # We filter data immediately before it ever gets into the queue
            sensitive_fields_key = 'sentry:sensitive_fields'
            sensitive_fields = (org_options.get(sensitive_fields_key, []) +
                                project.get_option(sensitive_fields_key, []))

            if org_options.get('sentry:require_scrub_defaults', False):
                scrub_defaults = True
            else:
                scrub_defaults = project.get_option('sentry:scrub_defaults',
                                                    True)

            inst = SensitiveDataFilter(
                fields=sensitive_fields,
                include_defaults=scrub_defaults,
            )
            inst.apply(data)

        if scrub_ip_address:
            # We filter data immediately before it ever gets into the queue
            helper.ensure_does_not_have_ip(data)

        # mutates data (strips a lot of context if not queued)
        helper.insert_data_to_database(data)

        cache.set(cache_key, '', 60 * 5)

        helper.log.debug('New event received (%s)', event_id)

        return event_id
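
Two details in process() are easy to miss: the duplicate-ID guard keyed on ev:<project_id>:<event_id>, and the fact that the key is only written after the event has been stored. A standalone sketch of that pattern, assuming any cache backend with get/set (the helper names here are illustrative):

def was_seen(cache, project_id, event_id):
    # True if an event with this ID was already accepted for the project.
    return cache.get('ev:%s:%s' % (project_id, event_id)) is not None

def mark_seen(cache, project_id, event_id, ttl=60 * 5):
    # Called only after the event is safely inserted, mirroring process() above.
    cache.set('ev:%s:%s' % (project_id, event_id), '', ttl)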
Example #5
    def process(self, request, project, auth, helper, data, **kwargs):
        metrics.incr('events.total')

        remote_addr = request.META['REMOTE_ADDR']
        event_received.send_robust(ip=remote_addr, sender=type(self))

        if not is_valid_ip(remote_addr, project):
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.project_total_blacklisted, project.id),
                (app.tsdb.models.organization_total_received, project.organization_id),
                (app.tsdb.models.organization_total_blacklisted, project.organization_id),
            ])
            metrics.incr('events.blacklisted')
            raise APIForbidden('Blacklisted IP address: %s' % (remote_addr,))

        # TODO: improve this API (e.g. make RateLimit act on __ne__)
        rate_limit = safe_execute(app.quotas.is_rate_limited, project=project,
                                  _with_transaction=False)
        if isinstance(rate_limit, bool):
            rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

        # XXX(dcramer): when the rate limiter fails we drop events to ensure
        # it cannot cascade
        if rate_limit is None or rate_limit.is_limited:
            if rate_limit is None:
                helper.log.debug('Dropped event due to error with rate limiter')
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.project_total_rejected, project.id),
                (app.tsdb.models.organization_total_received, project.organization_id),
                (app.tsdb.models.organization_total_rejected, project.organization_id),
            ])
            metrics.incr('events.dropped')
            if rate_limit is not None:
                raise APIRateLimited(rate_limit.retry_after)
        else:
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.organization_total_received, project.organization_id),
            ])

        content_encoding = request.META.get('HTTP_CONTENT_ENCODING', '')

        if isinstance(data, basestring):
            if content_encoding == 'gzip':
                data = helper.decompress_gzip(data)
            elif content_encoding == 'deflate':
                data = helper.decompress_deflate(data)
            elif not data.startswith('{'):
                data = helper.decode_and_decompress_data(data)
            data = helper.safely_load_json_string(data)

        # mutates data
        data = helper.validate_data(project, data)

        # mutates data
        manager = EventManager(data, version=auth.version)
        data = manager.normalize()

        scrub_ip_address = project.get_option('sentry:scrub_ip_address', False)

        # insert IP address if not available
        if auth.is_public and not scrub_ip_address:
            helper.ensure_has_ip(data, remote_addr)

        event_id = data['event_id']

        # TODO(dcramer): ideally we'd only validate this if the event_id was
        # supplied by the user
        cache_key = 'ev:%s:%s' % (project.id, event_id,)

        if cache.get(cache_key) is not None:
            raise APIForbidden('An event with the same ID already exists (%s)' % (event_id,))

        if project.get_option('sentry:scrub_data', True):
            # We filter data immediately before it ever gets into the queue
            inst = SensitiveDataFilter(project.get_option('sentry:sensitive_fields', None))
            inst.apply(data)

        if scrub_ip_address:
            # We filter data immediately before it ever gets into the queue
            helper.ensure_does_not_have_ip(data)

        # mutates data (strips a lot of context if not queued)
        helper.insert_data_to_database(data)

        cache.set(cache_key, '', 60 * 5)

        helper.log.debug('New event received (%s)', event_id)

        return event_id
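
The rate-limit handling above relies on only two attributes of whatever the quota backend returns, is_limited and retry_after; a bare bool is normalized into that shape. A sketch of that interface (the real Sentry quotas class may carry more state):

import collections

RateLimit = collections.namedtuple('RateLimit', ['is_limited', 'retry_after'])

backend_result = True  # a backend returning a plain bool
if isinstance(backend_result, bool):
    backend_result = RateLimit(is_limited=backend_result, retry_after=None)
assert backend_result.is_limited and backend_result.retry_after is None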
Example #6
File: api.py Project: dcvz/sentry
    def process(self, request, project, auth, helper, data, **kwargs):
        metrics.incr('events.total')

        remote_addr = request.META['REMOTE_ADDR']
        event_received.send_robust(
            ip=remote_addr,
            sender=type(self),
        )

        if not data:
            raise APIError('No JSON data was found')

        if not is_valid_ip(remote_addr, project):
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.project_total_blacklisted, project.id),
                (app.tsdb.models.organization_total_received, project.organization_id),
                (app.tsdb.models.organization_total_blacklisted, project.organization_id),
            ])
            metrics.incr('events.blacklisted')
            raise APIForbidden('Blacklisted IP address: %s' % (remote_addr,))

        # TODO: improve this API (e.g. make RateLimit act on __ne__)
        rate_limit = safe_execute(app.quotas.is_rate_limited, project=project,
                                  _with_transaction=False)
        if isinstance(rate_limit, bool):
            rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

        # XXX(dcramer): when the rate limiter fails we drop events to ensure
        # it cannot cascade
        if rate_limit is None or rate_limit.is_limited:
            if rate_limit is None:
                helper.log.debug('Dropped event due to error with rate limiter')
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.project_total_rejected, project.id),
                (app.tsdb.models.organization_total_received, project.organization_id),
                (app.tsdb.models.organization_total_rejected, project.organization_id),
            ])
            metrics.incr('events.dropped')
            if rate_limit is not None:
                raise APIRateLimited(rate_limit.retry_after)
        else:
            app.tsdb.incr_multi([
                (app.tsdb.models.project_total_received, project.id),
                (app.tsdb.models.organization_total_received, project.organization_id),
            ])

        content_encoding = request.META.get('HTTP_CONTENT_ENCODING', '')

        if isinstance(data, (six.binary_type, six.text_type)):
            if content_encoding == 'gzip':
                data = helper.decompress_gzip(data)
            elif content_encoding == 'deflate':
                data = helper.decompress_deflate(data)
            elif data[0] != b'{':
                data = helper.decode_and_decompress_data(data)
            data = helper.safely_load_json_string(data)

        # mutates data
        data = helper.validate_data(project, data)

        if 'sdk' not in data:
            sdk = helper.parse_client_as_sdk(auth.client)
            if sdk:
                data['sdk'] = sdk

        # mutates data
        manager = EventManager(data, version=auth.version)
        data = manager.normalize()

        org_options = OrganizationOption.objects.get_all_values(project.organization_id)

        if org_options.get('sentry:require_scrub_ip_address', False):
            scrub_ip_address = True
        else:
            scrub_ip_address = project.get_option('sentry:scrub_ip_address', False)

        # insert IP address if not available and wanted
        if not scrub_ip_address:
            helper.ensure_has_ip(
                data, remote_addr, set_if_missing=auth.is_public or
                data.get('platform') in ('javascript', 'cocoa', 'objc'))

        event_id = data['event_id']

        # TODO(dcramer): ideally we'd only validate this if the event_id was
        # supplied by the user
        cache_key = 'ev:%s:%s' % (project.id, event_id,)

        if cache.get(cache_key) is not None:
            raise APIForbidden('An event with the same ID already exists (%s)' % (event_id,))

        if org_options.get('sentry:require_scrub_data', False):
            scrub_data = True
        else:
            scrub_data = project.get_option('sentry:scrub_data', True)

        if scrub_data:
            # We filter data immediately before it ever gets into the queue
            sensitive_fields_key = 'sentry:sensitive_fields'
            sensitive_fields = (
                org_options.get(sensitive_fields_key, []) +
                project.get_option(sensitive_fields_key, [])
            )

            if org_options.get('sentry:require_scrub_defaults', False):
                scrub_defaults = True
            else:
                scrub_defaults = project.get_option('sentry:scrub_defaults', True)

            inst = SensitiveDataFilter(
                fields=sensitive_fields,
                include_defaults=scrub_defaults,
            )
            inst.apply(data)

        if scrub_ip_address:
            # We filter data immediately before it ever gets into the queue
            helper.ensure_does_not_have_ip(data)

        # mutates data (strips a lot of context if not queued)
        helper.insert_data_to_database(data)

        cache.set(cache_key, '', 60 * 5)

        helper.log.debug('New event received (%s)', event_id)

        event_accepted.send_robust(
            ip=remote_addr,
            data=data,
            project=project,
            sender=type(self),
        )

        return event_id
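
Here the organization-level sensitive fields are concatenated with the project-level ones and handed to SensitiveDataFilter, which mutates the event in place. A small usage sketch based only on the API visible above (field names are illustrative, and the exact masking value is an assumption):

inst = SensitiveDataFilter(fields=['card_number'], include_defaults=True)
event = {'extra': {'card_number': '4111 1111 1111 1111', 'color': 'blue'}}
inst.apply(event)
# 'card_number' is expected to be scrubbed (e.g. replaced by a placeholder),
# while unrelated keys such as 'color' are left untouched.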
Example #7
    def process(self, request, project, auth, helper, data, **kwargs):
        metrics.incr("events.total")

        remote_addr = request.META["REMOTE_ADDR"]
        event_received.send_robust(ip=remote_addr, sender=type(self))

        if not is_valid_ip(remote_addr, project):
            app.tsdb.incr_multi(
                [
                    (app.tsdb.models.project_total_received, project.id),
                    (app.tsdb.models.project_total_blacklisted, project.id),
                    (app.tsdb.models.organization_total_received, project.organization_id),
                    (app.tsdb.models.organization_total_blacklisted, project.organization_id),
                ]
            )
            metrics.incr("events.blacklisted")
            raise APIForbidden("Blacklisted IP address: %s" % (remote_addr,))

        # TODO: improve this API (e.g. make RateLimit act on __ne__)
        rate_limit = safe_execute(app.quotas.is_rate_limited, project=project, _with_transaction=False)
        if isinstance(rate_limit, bool):
            rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

        # XXX(dcramer): when the rate limiter fails we drop events to ensure
        # it cannot cascade
        if rate_limit is None or rate_limit.is_limited:
            if rate_limit is None:
                helper.log.debug("Dropped event due to error with rate limiter")
            app.tsdb.incr_multi(
                [
                    (app.tsdb.models.project_total_received, project.id),
                    (app.tsdb.models.project_total_rejected, project.id),
                    (app.tsdb.models.organization_total_received, project.organization_id),
                    (app.tsdb.models.organization_total_rejected, project.organization_id),
                ]
            )
            metrics.incr("events.dropped")
            if rate_limit is not None:
                raise APIRateLimited(rate_limit.retry_after)
        else:
            app.tsdb.incr_multi(
                [
                    (app.tsdb.models.project_total_received, project.id),
                    (app.tsdb.models.organization_total_received, project.organization_id),
                ]
            )

        content_encoding = request.META.get("HTTP_CONTENT_ENCODING", "")

        if isinstance(data, basestring):
            if content_encoding == "gzip":
                data = helper.decompress_gzip(data)
            elif content_encoding == "deflate":
                data = helper.decompress_deflate(data)
            elif not data.startswith("{"):
                data = helper.decode_and_decompress_data(data)
            data = helper.safely_load_json_string(data)

        # mutates data
        data = helper.validate_data(project, data)

        if "sdk" not in data:
            sdk = helper.parse_client_as_sdk(auth.client)
            if sdk:
                data["sdk"] = sdk

        # mutates data
        manager = EventManager(data, version=auth.version)
        data = manager.normalize()

        org_options = OrganizationOption.objects.get_all_values(project.organization_id)

        if org_options.get("sentry:require_scrub_ip_address", False):
            scrub_ip_address = True
        else:
            scrub_ip_address = project.get_option("sentry:scrub_ip_address", False)

        # insert IP address if not available and wanted
        if not scrub_ip_address:
            helper.ensure_has_ip(
                data,
                remote_addr,
                set_if_missing=auth.is_public or data.get("platform") in ("javascript", "cocoa", "objc"),
            )

        event_id = data["event_id"]

        # TODO(dcramer): ideally we'd only validate this if the event_id was
        # supplied by the user
        cache_key = "ev:%s:%s" % (project.id, event_id)

        if cache.get(cache_key) is not None:
            raise APIForbidden("An event with the same ID already exists (%s)" % (event_id,))

        if org_options.get("sentry:require_scrub_data", False):
            scrub_data = True
        else:
            scrub_data = project.get_option("sentry:scrub_data", True)

        if scrub_data:
            # We filter data immediately before it ever gets into the queue
            sensitive_fields_key = "sentry:sensitive_fields"
            sensitive_fields = org_options.get(sensitive_fields_key, []) + project.get_option(sensitive_fields_key, [])

            if org_options.get("sentry:require_scrub_defaults", False):
                scrub_defaults = True
            else:
                scrub_defaults = project.get_option("sentry:scrub_defaults", True)

            inst = SensitiveDataFilter(fields=sensitive_fields, include_defaults=scrub_defaults)
            inst.apply(data)

        if scrub_ip_address:
            # We filter data immediately before it ever gets into the queue
            helper.ensure_does_not_have_ip(data)

        # mutates data (strips a lot of context if not queued)
        helper.insert_data_to_database(data)

        cache.set(cache_key, "", 60 * 5)

        helper.log.debug("New event received (%s)", event_id)

        event_accepted.send_robust(ip=remote_addr, data=data, project=project, sender=type(self))

        return event_id