Example #1
def test_is_rate_limited_script():
    now = int(time.time())

    cluster = clusters.get('default')
    client = cluster.get_local_client(six.next(iter(cluster.hosts)))

    # The item should not be rate limited by either key.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))
                ) == [False, False]

    # The item should be rate limited by the first key (1).
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))
                ) == [True, False]

    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))
                ) == [True, False]

    assert client.get('foo') == '1'
    assert 59 <= client.ttl('foo') <= 60

    assert client.get('bar') == '1'
    assert 119 <= client.ttl('bar') <= 120
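
Note: in each call above, the key tuple and the flat argument tuple line up pairwise; the arguments interleave a limit and an expiration timestamp for every key. A minimal illustrative helper (not part of Sentry; the name is ours) that builds that layout:

import time

def build_rate_limit_args(quotas):
    # Flatten [(key, limit, window_seconds), ...] into the layout used above:
    # keys in order, then one (limit, expiration_timestamp) pair per key.
    now = int(time.time())
    keys = tuple(key for key, _, _ in quotas)
    args = tuple(v for _, limit, window in quotas for v in (limit, now + window))
    return keys, args

# build_rate_limit_args([('foo', 1, 60), ('bar', 2, 120)])
# -> (('foo', 'bar'), (1, <now + 60>, 2, <now + 120>))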
Example #2
def pytest_runtest_teardown(item):
    if not os.environ.get("USE_SNUBA", False):
        from sentry import tsdb

        # TODO(dcramer): this only works if this is the correct tsdb backend
        tsdb.flush()

    # XXX(dcramer): only works with DummyNewsletter
    from sentry import newsletter

    if hasattr(newsletter.backend, "clear"):
        newsletter.backend.clear()

    from sentry.utils.redis import clusters

    with clusters.get("default").all() as client:
        client.flushdb()

    from celery.task.control import discard_all

    discard_all()

    from sentry.models import OrganizationOption, ProjectOption, UserOption

    for model in (OrganizationOption, ProjectOption, UserOption):
        model.objects.clear_local_cache()

    # Hub comes from the Sentry SDK; unbind its client so state doesn't leak between tests.
    from sentry_sdk import Hub

    Hub.main.bind_client(None)
Example #3
    def test_truncate_timeline_script(self):
        cluster = clusters.get('default')
        client = cluster.get_local_client(six.next(iter(cluster.hosts)))

        timeline = 'timeline'

        # Preload some fake records (the contents don't matter.)
        records = list(itertools.islice(self.records, 10))
        for record in records:
            client.zadd(timeline, record.timestamp, record.key)
            client.set(make_record_key(timeline, record.key), 'data')

        with self.assertChanges(lambda: client.zcard(timeline),
                                before=10,
                                after=5):
            truncate_timeline(client, (timeline, ), (5, timeline))

            # Ensure the early records don't exist.
            for record in records[:5]:
                assert not client.zscore(timeline, record.key)
                assert not client.exists(make_record_key(timeline, record.key))

            # Ensure the later records do exist.
            for record in records[-5:]:
                assert client.zscore(timeline,
                                     record.key) == float(record.timestamp)
                assert client.exists(make_record_key(timeline, record.key))
Example #4
def test_is_rate_limited_script():
    now = int(time.time())

    cluster = clusters.get('default')
    client = cluster.get_local_client(six.next(iter(cluster.hosts)))

    # The item should not be rate limited by either key.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \
        [False, False]

    # The item should be rate limited by the first key (1).
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \
        [True, False]

    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \
        [True, False]

    assert client.get('foo') == '1'
    assert 59 <= client.ttl('foo') <= 60

    assert client.get('bar') == '1'
    assert 119 <= client.ttl('bar') <= 120
Example #5
    def test_ensure_timeline_scheduled_script(self):
        cluster = clusters.get('default')
        client = cluster.get_local_client(six.next(iter(cluster.hosts)))

        timeline = 'timeline'
        timestamp = 100.0

        waiting_set_size = functools.partial(client.zcard, 'waiting')
        ready_set_size = functools.partial(client.zcard, 'ready')

        timeline_score_in_waiting_set = functools.partial(client.zscore, 'waiting', timeline)
        timeline_score_in_ready_set = functools.partial(client.zscore, 'ready', timeline)

        keys = ('waiting', 'ready', 'last-processed')

        # The first addition should cause the timeline to be added to the ready set.
        with self.assertChanges(ready_set_size, before=0, after=1), \
                self.assertChanges(timeline_score_in_ready_set, before=None, after=timestamp):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp, 1, 10)) == 1

        # Adding it again with a timestamp in the future should not change the schedule time.
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertDoesNotChange(ready_set_size), \
                self.assertDoesNotChange(timeline_score_in_ready_set):
            assert ensure_timeline_scheduled(
                client, keys, (timeline, timestamp + 50, 1, 10)) is None

        # Move the timeline from the ready set to the waiting set.
        client.zrem('ready', timeline)
        client.zadd('waiting', timestamp, timeline)
        client.set('last-processed', timestamp)

        increment = 1
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp + increment):
            assert ensure_timeline_scheduled(
                client, keys, (timeline, timestamp, increment, 10)) is None

        # Make sure the schedule respects the maximum value.
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp + 1, after=timestamp):
            assert ensure_timeline_scheduled(
                client, keys, (timeline, timestamp, increment, 0)) is None

        # Test to ensure a missing last processed timestamp can be handled
        # correctly (chooses minimum of schedule value and record timestamp.)
        client.zadd('waiting', timestamp, timeline)
        client.delete('last-processed')
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertDoesNotChange(timeline_score_in_waiting_set):
            assert ensure_timeline_scheduled(
                client, keys, (timeline, timestamp + 100, increment, 10)) is None

        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp - 100):
            assert ensure_timeline_scheduled(
                client, keys, (timeline, timestamp - 100, increment, 10)) is None
Example #6
def test_is_rate_limited_script():
    now = int(time.time())

    cluster = clusters.get('default')
    client = cluster.get_local_client(six.next(iter(cluster.hosts)))

    # The item should not be rate limited by either key.
    assert list(
        map(
            bool,
            is_rate_limited(client, ('foo', 'r:foo', 'bar', 'r:bar'),
                            (1, now + 60, 2, now + 120)))) == [False, False]

    # The item should be rate limited by the first key (1).
    assert list(
        map(
            bool,
            is_rate_limited(client, ('foo', 'r:foo', 'bar', 'r:bar'),
                            (1, now + 60, 2, now + 120)))) == [True, False]

    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(
        map(
            bool,
            is_rate_limited(client, ('foo', 'r:foo', 'bar', 'r:bar'),
                            (1, now + 60, 2, now + 120)))) == [True, False]

    assert client.get('foo') == '1'
    assert 59 <= client.ttl('foo') <= 60

    assert client.get('bar') == '1'
    assert 119 <= client.ttl('bar') <= 120

    # make sure "refund/negative" keys haven't been incremented
    assert client.get('r:foo') is None
    assert client.get('r:bar') is None

    # Test that refunded quotas work
    client.set('apple', 5)
    # increment
    is_rate_limited(client, ('orange', 'baz'), (1, now + 60))
    # test that it's rate limited without refund
    assert list(
        map(bool, is_rate_limited(client, ('orange', 'baz'),
                                  (1, now + 60)))) == [
                                      True,
                                  ]
    # test that refund key is used
    assert list(
        map(bool, is_rate_limited(client, ('orange', 'apple'),
                                  (1, now + 60)))) == [
                                      False,
                                  ]
Example #7
def pytest_runtest_teardown(item):
    from sentry.app import tsdb
    tsdb.flush()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    from celery.task.control import discard_all
    discard_all()
Example #9
def pytest_runtest_teardown(item):
    from sentry import tsdb
    # TODO(dcramer): this only works if this is the correct tsdb backend
    tsdb.backend.flush()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    from celery.task.control import discard_all
    discard_all()
Example #11
    def test_ensure_timeline_scheduled_script(self):
        cluster = clusters.get('default')
        client = cluster.get_local_client(six.next(iter(cluster.hosts)))

        timeline = 'timeline'
        timestamp = 100.0

        waiting_set_size = functools.partial(client.zcard, 'waiting')
        ready_set_size = functools.partial(client.zcard, 'ready')

        timeline_score_in_waiting_set = functools.partial(client.zscore, 'waiting', timeline)
        timeline_score_in_ready_set = functools.partial(client.zscore, 'ready', timeline)

        keys = ('waiting', 'ready', 'last-processed')

        # The first addition should cause the timeline to be added to the ready set.
        with self.assertChanges(ready_set_size, before=0, after=1), \
                self.assertChanges(timeline_score_in_ready_set, before=None, after=timestamp):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp, 1, 10)) == 1

        # Adding it again with a timestamp in the future should not change the schedule time.
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertDoesNotChange(ready_set_size), \
                self.assertDoesNotChange(timeline_score_in_ready_set):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp + 50, 1, 10)) is None

        # Move the timeline from the ready set to the waiting set.
        client.zrem('ready', timeline)
        client.zadd('waiting', timestamp, timeline)
        client.set('last-processed', timestamp)

        increment = 1
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp + increment):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp, increment, 10)) is None

        # Make sure the schedule respects the maximum value.
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp + 1, after=timestamp):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp, increment, 0)) is None

        # Test to ensure a missing last processed timestamp can be handled
        # correctly (chooses minimum of schedule value and record timestamp.)
        client.zadd('waiting', timestamp, timeline)
        client.delete('last-processed')
        with self.assertDoesNotChange(waiting_set_size), \
                self.assertDoesNotChange(timeline_score_in_waiting_set):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp + 100, increment, 10)) is None

        with self.assertDoesNotChange(waiting_set_size), \
                self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp - 100):
            assert ensure_timeline_scheduled(client, keys, (timeline, timestamp - 100, increment, 10)) is None
Example #12
def test_is_rate_limited_script():
    now = int(time.time())

    cluster = clusters.get('default')
    client = cluster.get_local_client(six.next(iter(cluster.hosts)))

    # The item should not be rate limited by either key.
    assert list(map(bool, is_rate_limited(
                client, ('foo', 'r:foo', 'bar', 'r:bar'), (1, now + 60, 2, now + 120)))
                ) == [False, False]

    # The item should be rate limited by the first key (1).
    assert list(map(bool, is_rate_limited(
                client, ('foo', 'r:foo', 'bar', 'r:bar'), (1, now + 60, 2, now + 120)))
                ) == [True, False]

    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(map(bool, is_rate_limited(
                client, ('foo', 'r:foo', 'bar', 'r:bar'), (1, now + 60, 2, now + 120)))
                ) == [True, False]

    assert client.get('foo') == '1'
    assert 59 <= client.ttl('foo') <= 60

    assert client.get('bar') == '1'
    assert 119 <= client.ttl('bar') <= 120

    # make sure "refund/negative" keys haven't been incremented
    assert client.get('r:foo') is None
    assert client.get('r:bar') is None

    # Test that refunded quotas work
    client.set('apple', 5)
    # increment
    is_rate_limited(
        client, ('orange', 'baz'), (1, now + 60)
    )
    # test that it's rate limited without refund
    assert list(map(bool, is_rate_limited(
        client, ('orange', 'baz'), (1, now + 60)
    ))) == [True, ]
    # test that refund key is used
    assert list(map(bool, is_rate_limited(
        client, ('orange', 'apple'), (1, now + 60)
    ))) == [False, ]
Example #13
def pytest_runtest_teardown(item):
    from sentry import tsdb
    # TODO(dcramer): this only works if this is the correct tsdb backend
    tsdb.flush()

    # XXX(dcramer): only works with DummyNewsletter
    from sentry import newsletter
    if hasattr(newsletter.backend, 'clear'):
        newsletter.backend.clear()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    from celery.task.control import discard_all
    discard_all()
Example #15
    def _test_pipeline(self, flow):
        initial_state = {
            "org_id": self.organization.id,
            "flow": flow,
            "provider_model_id": self.auth_provider.id,
            "provider_key": None,
        }
        local_client = clusters.get("default").get_local_client_for_key(self.auth_key)
        local_client.set(self.auth_key, json.dumps(initial_state))

        helper = AuthHelper.get_for_request(self.request)
        helper.initialize()
        assert helper.is_valid()

        first_step = helper.current_step()
        assert first_step.status_code == 200

        next_step = helper.next_step()
        assert next_step.status_code == 302
        return next_step
Example #16
def pytest_runtest_teardown(item):
    from sentry import tsdb
    # TODO(dcramer): this only works if this is the correct tsdb backend
    tsdb.flush()

    # XXX(dcramer): only works with DummyNewsletter
    from sentry import newsletter
    if hasattr(newsletter.backend, 'clear'):
        newsletter.backend.clear()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    from celery.task.control import discard_all
    discard_all()

    from sentry.models import OrganizationOption, ProjectOption, UserOption
    for model in (OrganizationOption, ProjectOption, UserOption):
        model.objects.clear_local_cache()
Example #17
    def test_truncate_timeline_script(self):
        cluster = clusters.get('default')
        client = cluster.get_local_client(six.next(iter(cluster.hosts)))

        timeline = 'timeline'

        # Preload some fake records (the contents don't matter.)
        records = list(itertools.islice(self.records, 10))
        for record in records:
            client.zadd(timeline, record.timestamp, record.key)
            client.set(make_record_key(timeline, record.key), 'data')

        with self.assertChanges(lambda: client.zcard(timeline), before=10, after=5):
            truncate_timeline(client, (timeline,), (5, timeline))

            # Ensure the early records don't exist.
            for record in records[:5]:
                assert not client.zscore(timeline, record.key)
                assert not client.exists(make_record_key(timeline, record.key))

            # Ensure the later records do exist.
            for record in records[-5:]:
                assert client.zscore(timeline, record.key) == float(record.timestamp)
                assert client.exists(make_record_key(timeline, record.key))
Example #18
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')

    os.environ.setdefault('RECAPTCHA_TESTING', 'True')
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    if not settings.configured:
        # only configure the db if it's not already done
        test_db = os.environ.get('DB', 'postgres')
        if test_db == 'mysql':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.mysql',
                'NAME': 'sentry',
                'USER': '******',
                'HOST': '127.0.0.1',
            })
            # mysql requires running full migration all the time
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == 'postgres':
            settings.DATABASES['default'].update({
                'ENGINE': 'sentry.db.postgres',
                'USER': '******',
                'NAME': 'sentry',
            })
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            })
            settings.SOUTH_TESTS_MIGRATE = os.environ.get('SENTRY_SOUTH_TESTS_MIGRATE', '1') == '1'
        else:
            raise RuntimeError('oops, wrong database: %r' % test_db)

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
        'tests',
    )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    # enable draft features
    settings.SENTRY_OPTIONS['mail.enable-replies'] = True

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.RECAPTCHA_PUBLIC_KEY = 'a' * 40
    settings.RECAPTCHA_PRIVATE_KEY = 'b' * 40

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DEBUG_VIEWS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update({
        'redis.clusters': {
            'default': {
                'hosts': {
                    0: {
                        'db': 9,
                    },
                },
            },
        },
        'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
        'system.url-prefix': 'http://testserver',
    })

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    from sentry.runner.initializer import (
        bootstrap_options, configure_structlog, initialize_receivers, fix_south,
        bind_cache_to_option_store)

    bootstrap_options(settings)
    configure_structlog()
    fix_south(settings)

    bind_cache_to_option_store()

    initialize_receivers()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http
    http.DISALLOWED_IPS = set()
Example #19
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault("_SENTRY_SKIP_CONFIGURATION", "1")

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentry.conf.server")

    # override docs which are typically synchronized from an upstream server
    # to ensure tests are consistent
    os.environ.setdefault(
        "INTEGRATION_DOC_FOLDER",
        os.path.join(TEST_ROOT, "fixtures", "integration-docs"))
    from sentry.utils import integrationdocs

    integrationdocs.DOC_FOLDER = os.environ["INTEGRATION_DOC_FOLDER"]

    if not settings.configured:
        # only configure the db if it's not already done
        test_db = os.environ.get("DB", "postgres")
        if test_db == "postgres":
            settings.DATABASES["default"].update({
                "ENGINE": "sentry.db.postgres",
                "USER": "******",
                "NAME": "sentry",
                "HOST": "127.0.0.1",
            })
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
        else:
            raise RuntimeError("oops, wrong database: %r" % test_db)

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ("tests", )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = "sentry.cache.django.DjangoCache"
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = [
        "django.contrib.auth.hashers.MD5PasswordHasher"
    ]

    settings.AUTH_PASSWORD_VALIDATORS = []

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index("sentry.middleware.sudo.SudoMiddleware")
    middleware[sudo] = "sentry.testutils.middleware.SudoMiddleware"
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    settings.SENTRY_OPTIONS["cloudflare.secret-key"] = "cloudflare-secret-key"

    # enable draft features
    settings.SENTRY_OPTIONS["mail.enable-replies"] = True

    settings.SENTRY_ALLOW_ORIGIN = "*"

    settings.SENTRY_TSDB = "sentry.tsdb.inmemory.InMemoryTSDB"
    settings.SENTRY_TSDB_OPTIONS = {}

    if settings.SENTRY_NEWSLETTER == "sentry.newsletter.base.Newsletter":
        settings.SENTRY_NEWSLETTER = "sentry.newsletter.dummy.DummyNewsletter"
        settings.SENTRY_NEWSLETTER_OPTIONS = {}

    settings.BROKER_BACKEND = "memory"
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DEBUG_VIEWS = True

    settings.SENTRY_ENCRYPTION_SCHEMES = ()

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache"
        },
        "nodedata": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache"
        },
    }

    if os.environ.get("USE_SNUBA", False):
        settings.SENTRY_SEARCH = "sentry.search.snuba.EventsDatasetSnubaSearchBackend"
        settings.SENTRY_TSDB = "sentry.tsdb.redissnuba.RedisSnubaTSDB"
        settings.SENTRY_EVENTSTREAM = "sentry.eventstream.snuba.SnubaEventStream"

    if not hasattr(settings, "SENTRY_OPTIONS"):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update({
        "redis.clusters": {
            "default": {
                "hosts": {
                    0: {
                        "db": 9
                    }
                }
            }
        },
        "mail.backend": "django.core.mail.backends.locmem.EmailBackend",
        "system.url-prefix": "http://testserver",
        "slack.client-id": "slack-client-id",
        "slack.client-secret": "slack-client-secret",
        "slack.verification-token": "slack-verification-token",
        "github-app.name": "sentry-test-app",
        "github-app.client-id": "github-client-id",
        "github-app.client-secret": "github-client-secret",
        "vsts.client-id": "vsts-client-id",
        "vsts.client-secret": "vsts-client-secret",
    })

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch("socket.getfqdn", return_value="localhost")
    patcher.start()

    if not settings.MIGRATIONS_TEST_MIGRATE:
        # Migrations for the "sentry" app take a long time to run, which makes test startup time slow in dev.
        # This is a hack to force django to sync the database state from the models rather than use migrations.
        settings.MIGRATION_MODULES["sentry"] = None

    from sentry.runner.initializer import (
        bind_cache_to_option_store,
        bootstrap_options,
        configure_structlog,
        initialize_receivers,
        monkeypatch_model_unpickle,
        monkeypatch_django_migrations,
        setup_services,
    )

    bootstrap_options(settings)
    configure_structlog()

    monkeypatch_model_unpickle()

    import django

    django.setup()

    monkeypatch_django_migrations()

    bind_cache_to_option_store()

    initialize_receivers()
    setup_services()
    register_extensions()

    from sentry.utils.redis import clusters

    with clusters.get("default").all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http

    http.DISALLOWED_IPS = set()
Example #21
    def _client(self):
        return clusters.get('default').get_local_client_for_key(self.redis_key)
Example #23
    def _client(self):
        return clusters.get("default").get_local_client_for_key(self.auth_key)
Example #24
import time

from sentry.utils.dates import to_datetime, to_timestamp
from sentry.models import Project, Group, Event

from django.utils import timezone
from django.conf import settings

MAX_RECENT = 15
RECENT_HOURS = 24 * 30

# The Redis cluster manager (``clusters``) was added in Sentry 8.2 (GH-2714)
# and replaces ``make_rb_cluster`` (which will be removed in a future version.)
try:
    from sentry.utils.redis import clusters
    cluster = clusters.get('default')
except ImportError:
    from sentry.utils.redis import make_rb_cluster
    cluster = make_rb_cluster(settings.SENTRY_REDIS_OPTIONS['hosts'])


def get_key(tenant):
    return 'sentry-hipchat-ac:%s:mentions' % tenant.id


def get_recent_mentions(tenant):
    client = cluster.get_routing_client()
    key = get_key(tenant)
    ids = [
        x for x in client.zrangebyscore(key,
                                        time.time() - (RECENT_HOURS * 60),
                                        '+inf')][-MAX_RECENT:]
Example #25
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')

    os.environ.setdefault('RECAPTCHA_TESTING', 'True')
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    settings.SOUTH_TESTS_MIGRATE = os.environ.get('SENTRY_SOUTH_TESTS_MIGRATE',
                                                  '1') == '1'

    if not settings.configured:
        # only configure the db if it's not already done
        test_db = os.environ.get('DB', 'postgres')
        if test_db == 'mysql':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.mysql',
                'NAME': 'sentry',
                'USER': '******',
            })
            # mysql requires running full migration all the time
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == 'postgres':
            settings.DATABASES['default'].update({
                'ENGINE': 'sentry.db.postgres',
                'USER': '******',
                'NAME': 'sentry',
            })
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            })

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ('tests', )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    # enable draft features
    settings.SENTRY_OPTIONS['mail.enable-replies'] = True

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.RECAPTCHA_PUBLIC_KEY = 'a' * 40
    settings.RECAPTCHA_PRIVATE_KEY = 'b' * 40

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update({
        'redis.clusters': {
            'default': {
                'hosts': {
                    0: {
                        'db': 9,
                    },
                },
            },
        },
        'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
        'system.url-prefix': 'http://testserver',
    })

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    from sentry.runner.initializer import (bootstrap_options,
                                           initialize_receivers, fix_south,
                                           bind_cache_to_option_store)

    bootstrap_options(settings)
    fix_south(settings)

    bind_cache_to_option_store()

    initialize_receivers()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http
    http.DISALLOWED_IPS = set()
Example #26
def redis_teardown():
    from sentry.utils.redis import clusters
    with clusters.get('default').all() as client:
        client.flushdb()
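
If a helper like this is not already wired up by the test suite, one way to apply it automatically is an autouse pytest fixture (a minimal sketch; the fixture name is ours and assumes redis_teardown from the example above is importable):

import pytest

@pytest.fixture(autouse=True)
def _flush_default_cluster():
    # Run the test body first, then flush every host of the test cluster.
    yield
    redis_teardown()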
Example #27
import time

from sentry.utils.dates import to_datetime, to_timestamp
from sentry.models import Project, Group, Event

from django.utils import timezone
from django.conf import settings


MAX_RECENT = 15
RECENT_HOURS = 24 * 30


# The Redis cluster manager (``clusters``) was added in Sentry 8.2 (GH-2714)
# and replaces ``make_rb_cluster`` (which will be removed in a future version.)
try:
    from sentry.utils.redis import clusters
    cluster = clusters.get('default')
except ImportError:
    from sentry.utils.redis import make_rb_cluster
    cluster = make_rb_cluster(settings.SENTRY_REDIS_OPTIONS['hosts'])


def get_key(tenant):
    return 'sentry-hipchat-ac:%s:mentions' % tenant.id


def get_recent_mentions(tenant):
    client = cluster.get_routing_client()
    key = get_key(tenant)
    ids = [x for x in client.zrangebyscore(
        key, time.time() - (RECENT_HOURS * 60), '+inf')][-MAX_RECENT:]
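
Only the read path is shown here. A hypothetical write-side counterpart, sketched under the same key layout (the function name and TTL choice are assumptions, not part of the plugin), would push mention ids into the same sorted set through the routing client:

def record_mention(tenant, mention_id):
    # Score the mention by the current time so the zrangebyscore call above
    # can select the recent window; the old zadd(key, score, member) argument
    # order matches the other examples in this listing.
    client = cluster.get_routing_client()
    key = get_key(tenant)
    client.zadd(key, time.time(), mention_id)
    # Expire the set on the same window used by the read above.
    client.expire(key, RECENT_HOURS * 60)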
Example #28
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault("_SENTRY_SKIP_CONFIGURATION", "1")

    os.environ.setdefault("RECAPTCHA_TESTING", "True")
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentry.conf.server")

    settings.SOUTH_TESTS_MIGRATE = os.environ.get("SENTRY_SOUTH_TESTS_MIGRATE", "1") == "1"

    if not settings.configured:
        # only configure the db if it's not already done
        test_db = os.environ.get("DB", "postgres")
        if test_db == "mysql":
            settings.DATABASES["default"].update(
                {"ENGINE": "django.db.backends.mysql", "NAME": "sentry", "USER": "******"}
            )
            # mysql requires running full migration all the time
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == "postgres":
            settings.DATABASES["default"].update({"ENGINE": "sentry.db.postgres", "USER": "******", "NAME": "sentry"})
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == "sqlite":
            settings.DATABASES["default"].update({"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"})

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ("tests",)
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = "sentry.cache.django.DjangoCache"
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index("sentry.middleware.sudo.SudoMiddleware")
    middleware[sudo] = "sentry.testutils.middleware.SudoMiddleware"
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    # enable draft features
    settings.SENTRY_OPTIONS["mail.enable-replies"] = True

    settings.SENTRY_ALLOW_ORIGIN = "*"

    settings.SENTRY_TSDB = "sentry.tsdb.inmemory.InMemoryTSDB"
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.RECAPTCHA_PUBLIC_KEY = "a" * 40
    settings.RECAPTCHA_PRIVATE_KEY = "b" * 40

    settings.BROKER_BACKEND = "memory"
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}

    if not hasattr(settings, "SENTRY_OPTIONS"):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update(
        {
            "redis.clusters": {"default": {"hosts": {0: {"db": 9}}}},
            "mail.backend": "django.core.mail.backends.locmem.EmailBackend",
            "system.url-prefix": "http://testserver",
        }
    )

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch("socket.getfqdn", return_value="localhost")
    patcher.start()

    from sentry.runner.initializer import bootstrap_options, initialize_receivers, fix_south, bind_cache_to_option_store

    bootstrap_options(settings)
    fix_south(settings)

    bind_cache_to_option_store()

    initialize_receivers()

    from sentry.utils.redis import clusters

    with clusters.get("default").all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http

    http.DISALLOWED_IPS = set()
Example #29
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    # override docs which are typically synchronized from an upstream server
    # to ensure tests are consistent
    os.environ.setdefault(
        'INTEGRATION_DOC_FOLDER',
        os.path.join(TEST_ROOT, 'fixtures', 'integration-docs'))
    from sentry.utils import integrationdocs
    integrationdocs.DOC_FOLDER = os.environ['INTEGRATION_DOC_FOLDER']

    if not settings.configured:
        # only configure the db if it's not already done
        test_db = os.environ.get('DB', 'postgres')
        if test_db == 'mysql':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.mysql',
                'NAME': 'sentry',
                'USER': '******',
                'HOST': '127.0.0.1',
            })
            # mysql requires running full migration all the time
        elif test_db == 'postgres':
            settings.DATABASES['default'].update({
                'ENGINE': 'sentry.db.postgres',
                'USER': '******',
                'NAME': 'sentry',
            })
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            })
        else:
            raise RuntimeError('oops, wrong database: %r' % test_db)

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ('tests', )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    settings.AUTH_PASSWORD_VALIDATORS = []

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    settings.SENTRY_OPTIONS['cloudflare.secret-key'] = 'cloudflare-secret-key'

    # enable draft features
    settings.SENTRY_OPTIONS['mail.enable-replies'] = True

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    if settings.SENTRY_NEWSLETTER == 'sentry.newsletter.base.Newsletter':
        settings.SENTRY_NEWSLETTER = 'sentry.newsletter.dummy.DummyNewsletter'
        settings.SENTRY_NEWSLETTER_OPTIONS = {}

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DEBUG_VIEWS = True

    settings.SENTRY_ENCRYPTION_SCHEMES = ()

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update({
        'redis.clusters': {
            'default': {
                'hosts': {
                    0: {
                        'db': 9,
                    },
                },
            },
        },
        'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
        'system.url-prefix': 'http://testserver',
        'slack.client-id': 'slack-client-id',
        'slack.client-secret': 'slack-client-secret',
        'slack.verification-token': 'slack-verification-token',
        'github-app.name': 'sentry-test-app',
        'github-app.client-id': 'github-client-id',
        'github-app.client-secret': 'github-client-secret',
        'vsts.client-id': 'vsts-client-id',
        'vsts.client-secret': 'vsts-client-secret',
    })

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    if not settings.SOUTH_TESTS_MIGRATE:
        settings.INSTALLED_APPS = tuple(i for i in settings.INSTALLED_APPS
                                        if i != 'south')

    from sentry.runner.initializer import (bootstrap_options,
                                           configure_structlog,
                                           initialize_receivers, fix_south,
                                           bind_cache_to_option_store,
                                           setup_services)

    bootstrap_options(settings)
    configure_structlog()
    fix_south(settings)

    bind_cache_to_option_store()

    initialize_receivers()
    setup_services()
    register_extensions()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http
    http.DISALLOWED_IPS = set()
Example #30
    def cluster(self):
        return clusters.get("default")
Example #31
    def cluster(self):
        return clusters.get('default')
Example #32
def pytest_configure(config):
    import warnings

    from django.utils.deprecation import RemovedInDjango30Warning

    warnings.filterwarnings(action="ignore", category=RemovedInDjango30Warning)

    # This is just to filter out an obvious warning before the pytest session starts.
    warnings.filterwarnings(
        action="ignore",
        message=r".*sentry.digests.backends.dummy.DummyBackend.*",
        category=UnsupportedBackend,
    )

    # HACK: Only needed for testing!
    os.environ.setdefault("_SENTRY_SKIP_CONFIGURATION", "1")

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentry.conf.server")

    # override docs which are typically synchronized from an upstream server
    # to ensure tests are consistent
    os.environ.setdefault(
        "INTEGRATION_DOC_FOLDER", os.path.join(TEST_ROOT, "fixtures", "integration-docs")
    )
    from sentry.utils import integrationdocs

    integrationdocs.DOC_FOLDER = os.environ["INTEGRATION_DOC_FOLDER"]

    if not settings.configured:
        # only configure the db if its not already done
        test_db = os.environ.get("DB", "postgres")
        if test_db == "postgres":
            settings.DATABASES["default"].update(
                {
                    "ENGINE": "sentry.db.postgres",
                    "USER": "******",
                    "NAME": "sentry",
                    "HOST": "127.0.0.1",
                }
            )
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
        else:
            raise RuntimeError("oops, wrong database: %r" % test_db)

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ("tests",)
    if "sentry" in settings.INSTALLED_APPS:
        settings.INSTALLED_APPS = settings.INSTALLED_APPS + ("sentry.demo",)
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = "sentry.cache.django.DjangoCache"
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]

    settings.AUTH_PASSWORD_VALIDATORS = []

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE)
    sudo = middleware.index("sentry.middleware.sudo.SudoMiddleware")
    middleware[sudo] = "sentry.testutils.middleware.SudoMiddleware"
    settings.MIDDLEWARE = tuple(middleware)

    settings.SENTRY_OPTIONS["cloudflare.secret-key"] = "cloudflare-secret-key"

    # enable draft features
    settings.SENTRY_OPTIONS["mail.enable-replies"] = True

    settings.SENTRY_ALLOW_ORIGIN = "*"

    settings.SENTRY_TSDB = "sentry.tsdb.inmemory.InMemoryTSDB"
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.SENTRY_NEWSLETTER = "sentry.newsletter.dummy.DummyNewsletter"
    settings.SENTRY_NEWSLETTER_OPTIONS = {}

    settings.BROKER_BACKEND = "memory"
    settings.BROKER_URL = "memory://"
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DEBUG_VIEWS = True
    settings.SERVE_UPLOADED_FILES = True

    settings.SENTRY_ENCRYPTION_SCHEMES = ()

    settings.CACHES = {
        "default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
        "nodedata": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
    }

    settings.SENTRY_RATELIMITER = "sentry.ratelimits.redis.RedisRateLimiter"
    settings.SENTRY_RATELIMITER_OPTIONS = {}

    if os.environ.get("USE_SNUBA", False):
        settings.SENTRY_SEARCH = "sentry.search.snuba.EventsDatasetSnubaSearchBackend"
        settings.SENTRY_TSDB = "sentry.tsdb.redissnuba.RedisSnubaTSDB"
        settings.SENTRY_EVENTSTREAM = "sentry.eventstream.snuba.SnubaEventStream"

    if os.environ.get("USE_INDEXER", False):
        settings.SENTRY_METRICS_INDEXER = "sentry.sentry_metrics.indexer.postgres.PGStringIndexer"

    if os.environ.get("DISABLE_TEST_SDK", False):
        settings.SENTRY_SDK_CONFIG = {}

    if not hasattr(settings, "SENTRY_OPTIONS"):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update(
        {
            "redis.clusters": {"default": {"hosts": {0: {"db": 9}}}},
            "mail.backend": "django.core.mail.backends.locmem.EmailBackend",
            "system.url-prefix": "http://testserver",
            "system.secret-key": "a" * 52,
            "slack.client-id": "slack-client-id",
            "slack.client-secret": "slack-client-secret",
            "slack.verification-token": "slack-verification-token",
            "slack.signing-secret": "slack-signing-secret",
            "github-app.name": "sentry-test-app",
            "github-app.client-id": "github-client-id",
            "github-app.client-secret": "github-client-secret",
            "vsts.client-id": "vsts-client-id",
            "vsts.client-secret": "vsts-client-secret",
            "vsts-limited.client-id": "vsts-limited-client-id",
            "vsts-limited.client-secret": "vsts-limited-client-secret",
            "vercel.client-id": "vercel-client-id",
            "vercel.client-secret": "vercel-client-secret",
            "msteams.client-id": "msteams-client-id",
            "msteams.client-secret": "msteams-client-secret",
            "aws-lambda.access-key-id": "aws-key-id",
            "aws-lambda.secret-access-key": "aws-secret-access-key",
            "aws-lambda.cloudformation-url": "https://example.com/file.json",
            "aws-lambda.account-number": "1234",
            "aws-lambda.node.layer-name": "my-layer",
            "aws-lambda.node.layer-version": "3",
            "aws-lambda.python.layer-name": "my-python-layer",
            "aws-lambda.python.layer-version": "34",
        }
    )

    # Plugin-related settings
    settings.ASANA_CLIENT_ID = "abc"
    settings.ASANA_CLIENT_SECRET = "123"
    settings.BITBUCKET_CONSUMER_KEY = "abc"
    settings.BITBUCKET_CONSUMER_SECRET = "123"
    settings.GITHUB_APP_ID = "abc"
    settings.GITHUB_API_SECRET = "123"
    # this isn't the real secret
    settings.SENTRY_OPTIONS["github.integration-hook-secret"] = "b3002c3e321d4b7880360d397db2ccfd"

    # This is so tests can assume this feature is off by default
    settings.SENTRY_FEATURES["organizations:performance-view"] = False

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch("socket.getfqdn", return_value="localhost")
    patcher.start()

    if not settings.MIGRATIONS_TEST_MIGRATE:
        # Migrations for the "sentry" app take a long time to run, which makes test startup time slow in dev.
        # This is a hack to force django to sync the database state from the models rather than use migrations.
        settings.MIGRATION_MODULES["sentry"] = None
        settings.MIGRATION_MODULES["demo"] = None

    asset_version_patcher = mock.patch(
        "sentry.runner.initializer.get_asset_version", return_value="{version}"
    )
    asset_version_patcher.start()
    from sentry.runner.initializer import initialize_app

    initialize_app({"settings": settings, "options": None})
    register_extensions()

    from sentry.utils.redis import clusters

    with clusters.get("default").all() as client:
        client.flushdb()

    # force celery registration
    # disable DISALLOWED_IPS
    from sentry import http
    from sentry.celery import app  # NOQA

    http.DISALLOWED_IPS = set()
Example #33
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    # override docs which are typically synchronized from an upstream server
    # to ensure tests are consistent
    os.environ.setdefault(
        'INTEGRATION_DOC_FOLDER',
        os.path.join(
            TEST_ROOT,
            'fixtures',
            'integration-docs'))
    from sentry.utils import integrationdocs
    integrationdocs.DOC_FOLDER = os.environ['INTEGRATION_DOC_FOLDER']

    if not settings.configured:
        # only configure the db if it's not already done
        test_db = os.environ.get('DB', 'postgres')
        if test_db == 'mysql':
            settings.DATABASES['default'].update(
                {
                    'ENGINE': 'django.db.backends.mysql',
                    'NAME': 'sentry',
                    'USER': '******',
                    'HOST': '127.0.0.1',
                }
            )
            # mysql requires running full migration all the time
        elif test_db == 'postgres':
            settings.DATABASES['default'].update(
                {
                    'ENGINE': 'sentry.db.postgres',
                    'USER': '******',
                    'NAME': 'sentry',
                    'HOST': '127.0.0.1',
                }
            )
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update(
                {
                    'ENGINE': 'django.db.backends.sqlite3',
                    'NAME': ':memory:',
                }
            )
        else:
            raise RuntimeError('oops, wrong database: %r' % test_db)

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ('tests', )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is, by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    settings.AUTH_PASSWORD_VALIDATORS = []

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    settings.SENTRY_OPTIONS['cloudflare.secret-key'] = 'cloudflare-secret-key'

    # enable draft features
    settings.SENTRY_OPTIONS['mail.enable-replies'] = True

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    if settings.SENTRY_NEWSLETTER == 'sentry.newsletter.base.Newsletter':
        settings.SENTRY_NEWSLETTER = 'sentry.newsletter.dummy.DummyNewsletter'
        settings.SENTRY_NEWSLETTER_OPTIONS = {}

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DEBUG_VIEWS = True

    settings.SENTRY_ENCRYPTION_SCHEMES = ()

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update(
        {
            'redis.clusters': {
                'default': {
                    'hosts': {
                        0: {
                            'db': 9,
                        },
                    },
                },
            },
            'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
            'system.url-prefix': 'http://testserver',

            'slack.client-id': 'slack-client-id',
            'slack.client-secret': 'slack-client-secret',
            'slack.verification-token': 'slack-verification-token',

            'github-app.name': 'sentry-test-app',
            'github-app.client-id': 'github-client-id',
            'github-app.client-secret': 'github-client-secret',

            'vsts.client-id': 'vsts-client-id',
            'vsts.client-secret': 'vsts-client-secret',
        }
    )

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    if not settings.SOUTH_TESTS_MIGRATE:
        settings.INSTALLED_APPS = tuple(i for i in settings.INSTALLED_APPS if i != 'south')

    from sentry.runner.initializer import (
        bootstrap_options, configure_structlog, initialize_receivers, fix_south,
        bind_cache_to_option_store, setup_services
    )

    bootstrap_options(settings)
    configure_structlog()
    fix_south(settings)

    import django
    if hasattr(django, 'setup'):
        django.setup()

    bind_cache_to_option_store()

    initialize_receivers()
    setup_services()
    register_extensions()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http
    http.DISALLOWED_IPS = set()
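
Taken together, the examples above exercise four access patterns on the cluster manager. A condensed, illustrative recap (the cluster name and keys are simply the ones used throughout this listing, not requirements):

import six

from sentry.utils.redis import clusters

cluster = clusters.get('default')

# 1. Pin to a single host explicitly (used by the Lua script tests above).
local_client = cluster.get_local_client(six.next(iter(cluster.hosts)))

# 2. Route by key so reads and writes for that key hit the same host.
keyed_client = cluster.get_local_client_for_key('some-key')

# 3. Let a routing client dispatch each command based on its key.
routing_client = cluster.get_routing_client()

# 4. Fan a command out to every host (used by the teardown hooks above).
with cluster.all() as client:
    client.flushdb()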