Example 1
 def setUp(self):
     self.buf = RedisBuffer()
Example 2
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == '1'

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u'\u201d') == '”'

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending_one_batch(self, process_incr):
        self.buf.incr_batch_size = 5
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo', 'bar'],
        })
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending_multiple_batches(self, process_incr):
        self.buf.incr_batch_size = 2
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
            client.zadd('b:p', 3, 'baz')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo', 'bar'],
        })
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['baz'],
        })
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_bubble_up(self, process):
        client = self.buf.cluster.get_routing_client()
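        # Seed the buffer hash by hand; the field layout matches what incr()
        # writes elsewhere in these tests: 'e+<col>' is a pickled extra value
        # (here 'bar'), 'f' the pickled filters ({'pk': 1}), 'i+<col>' an
        # integer counter, and 'm' the dotted model path.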
        client.hmset(
            'foo', {
                'e+foo': "S'bar'\np1\n.",
                'f': "(dp1\nS'pk'\np2\nI1\ns.",
                'i+times_seen': '2',
                'm': 'sentry.models.Group',
            }
        )
        columns = {'times_seen': 2}
        filters = {'pk': 1}
        extra = {'foo': 'bar'}
        self.buf.process('foo')
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr', mock.Mock())
    def test_incr_saves_to_redis(self):
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = 'Mock'
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '1',
            'm': 'mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '2',
            'm': 'mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    @mock.patch('sentry.buffer.redis.process_pending')
    def test_process_pending_partitions_none(self, process_pending, process_incr):
        self.buf.pending_partitions = 2
        with self.buf.cluster.map() as client:
            client.zadd('b:p:0', 1, 'foo')
            client.zadd('b:p:1', 1, 'bar')
            client.zadd('b:p', 1, 'baz')

        # On first pass, we are expecting to do:
        # * process the buffer that doesn't have a partition (b:p)
        # * queue up 2 jobs, one for each partition to process.
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['baz'],
        })
        assert len(process_pending.apply_async.mock_calls) == 2
        assert process_pending.apply_async.mock_calls == [
            mock.call(kwargs={'partition': 0}),
            mock.call(kwargs={'partition': 1}),
        ]

        # Confirm that we've only processed the unpartitioned buffer
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []
        assert client.zrange('b:p:0', 0, -1) != []
        assert client.zrange('b:p:1', 0, -1) != []

        # partition 0
        self.buf.process_pending(partition=0)
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo'],
        })
        assert client.zrange('b:p:0', 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

        # partition 1
        self.buf.process_pending(partition=1)
        assert len(process_incr.apply_async.mock_calls) == 3
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['bar'],
        })
        assert client.zrange('b:p:1', 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2
Example 3
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == "1"

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u"\u201d") == "”"

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending_one_batch(self, process_incr):
        self.buf.incr_batch_size = 5
        with self.buf.cluster.map() as client:
            client.zadd("b:p", 1, "foo")
            client.zadd("b:p", 2, "bar")
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["foo", "bar"]})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending_multiple_batches(self, process_incr):
        self.buf.incr_batch_size = 2
        with self.buf.cluster.map() as client:
            client.zadd("b:p", 1, "foo")
            client.zadd("b:p", 2, "bar")
            client.zadd("b:p", 3, "baz")
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["foo", "bar"]})
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["baz"]})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up_json(self, process):
        client = self.buf.cluster.get_routing_client()
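        # JSON-encoded variant of the buffer hash: values are tagged pairs,
        # ["s", ...] for strings and ["d", ...] for datetimes stored as a
        # unix timestamp (1493791566 == 2017-05-03 06:06:06 UTC).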
        client.hmset(
            "foo",
            {
                "e+foo": '["s","bar"]',
                "e+datetime": '["d","1493791566.000000"]',
                "f": '{"pk": ["i","1"]}',
                "i+times_seen": "2",
                "m": "sentry.models.Group",
            },
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {
            "foo": "bar",
            "datetime": datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        }
        signal_only = None
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra,
                                        signal_only)

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up_pickle(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            "foo",
            {
                "e+foo": "S'bar'\np1\n.",
                "f": "(dp1\nS'pk'\np2\nI1\ns.",
                "i+times_seen": "2",
                "m": "sentry.models.Group",
            },
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {"foo": "bar"}
        signal_only = None
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra,
                                        signal_only)

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr", mock.Mock())
    def test_incr_saves_to_redis(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}
        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          "foo": "bar",
                          "datetime": now
                      })
        result = client.hgetall("foo")
        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "bar"
        assert result == {"i+times_seen": "1", "m": "mock.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == ["foo"]
        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          "foo": "baz",
                          "datetime": now
                      })
        result = client.hgetall("foo")
        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "baz"
        assert result == {"i+times_seen": "2", "m": "mock.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == ["foo"]

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr", mock.Mock())
    def test_batching_incr_saves_to_redis(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}
        with mock.patch("sentry.app.buffer", self.buf):
            with batch_buffers_incr():
                self.buf.incr(model,
                              columns,
                              filters,
                              extra={
                                  "foo": "bar",
                                  "datetime": now
                              })

                # changes should only be visible on batch_buffers_incr() exit
                assert not client.hgetall("foo")

                self.buf.incr(model,
                              columns,
                              filters,
                              extra={
                                  "foo": "baz",
                                  "datetime": now
                              })

        result = client.hgetall("foo")
        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "baz"
        assert result == {"i+times_seen": "2", "m": "mock.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == ["foo"]

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    @mock.patch("sentry.buffer.redis.process_pending")
    def test_process_pending_partitions_none(self, process_pending,
                                             process_incr):
        self.buf.pending_partitions = 2
        with self.buf.cluster.map() as client:
            client.zadd("b:p:0", 1, "foo")
            client.zadd("b:p:1", 1, "bar")
            client.zadd("b:p", 1, "baz")

        # On first pass, we are expecting to do:
        # * process the buffer that doesn't have a partition (b:p)
        # * queue up 2 jobs, one for each partition to process.
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["baz"]})
        assert len(process_pending.apply_async.mock_calls) == 2
        assert process_pending.apply_async.mock_calls == [
            mock.call(kwargs={"partition": 0}),
            mock.call(kwargs={"partition": 1}),
        ]

        # Confirm that we've only processed the unpartitioned buffer
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []
        assert client.zrange("b:p:0", 0, -1) != []
        assert client.zrange("b:p:1", 0, -1) != []

        # partition 0
        self.buf.process_pending(partition=0)
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["foo"]})
        assert client.zrange("b:p:0", 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

        # partition 1
        self.buf.process_pending(partition=1)
        assert len(process_incr.apply_async.mock_calls) == 3
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["bar"]})
        assert client.zrange("b:p:1", 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_uses_signal_only(self, process):
        client = self.buf.cluster.get_routing_client()
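        # An "s" field of "1" marks the payload as signal-only, which is what
        # produces the trailing True argument in the process() call below.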
        client.hmset(
            "foo",
            {
                "f": '{"pk": ["i","1"]}',
                "i+times_seen": "1",
                "m": "sentry.utils.compat.mock.Mock",
                "s": "1",
            },
        )
        self.buf.process("foo")
        process.assert_called_once_with(mock.Mock, {"times_seen": 1},
                                        {"pk": 1}, {}, True)

    """
    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    def test_incr_uses_signal_only(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}
        self.buf.incr(model, columns, filters, extra={"foo": "bar", "datetime": now}, signal_only=True)
        result = client.hgetall("foo")
        assert result == {
            "e+foo": '["s","bar"]',
            "e+datetime": '["d","1493791566.000000"]',
            "f": '{"pk":["i","1"],"datetime":["d","1493791566.000000"]}',
            "i+times_seen": "1",
            "m": "mock.mock.Mock",
            "s": "1"
        }
    """

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis._local_buffers", dict())
    def test_signal_only_saved_local_buffs(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}

        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          "foo": "bar",
                          "datetime": now
                      },
                      signal_only=True)

        from sentry.buffer.redis import _local_buffers

        frozen_filters = tuple(sorted(filters.items()))
        key = (frozen_filters, model)
        values = _local_buffers[key]

        assert values[-1]  # signal_only stored last
Example 4
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == b"1"

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val("\u201d") == "”".encode()

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending_one_batch(self, process_incr):
        self.buf.incr_batch_size = 5
        with self.buf.cluster.map() as client:
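            # redis-py 3.x zadd signature: a single {member: score} mapping
            # replaces the positional score/member pairs used in the other
            # examples.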
            client.zadd("b:p", {"foo": 1, "bar": 2})
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={"batch_keys": ["foo", "bar"]})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending_multiple_batches(self, process_incr):
        self.buf.incr_batch_size = 2
        with self.buf.cluster.map() as client:
            client.zadd("b:p", {"foo": 1, "bar": 2, "baz": 3})
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={"batch_keys": ["foo", "bar"]})
        process_incr.apply_async.assert_any_call(kwargs={"batch_keys": ["baz"]})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up_json(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            "foo",
            {
                "e+foo": '["s","bar"]',
                "e+datetime": '["d","1493791566.000000"]',
                "f": '{"pk": ["i","1"]}',
                "i+times_seen": "2",
                "m": "sentry.models.Group",
            },
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {"foo": "bar", "datetime": datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)}
        signal_only = None
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra, signal_only)

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up_pickle(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            "foo",
            {
                "e+foo": "S'bar'\np1\n.",
                "f": "(dp1\nS'pk'\np2\nI1\ns.",
                "i+times_seen": "2",
                "m": "sentry.models.Group",
            },
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {"foo": "bar"}
        signal_only = None
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra, signal_only)

    def test_get(self):
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = ["times_seen"]
        filters = {"pk": 1}
        # If the value doesn't exist we just assume 0
        assert self.buf.get(model, columns, filters=filters) == {"times_seen": 0}
        self.buf.incr(model, {"times_seen": 1}, filters)
        assert self.buf.get(model, columns, filters=filters) == {"times_seen": 1}
        self.buf.incr(model, {"times_seen": 5}, filters)
        assert self.buf.get(model, columns, filters=filters) == {"times_seen": 6}

    def test_incr_saves_to_redis(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}
        key = self.buf._make_key(model, filters=filters)
        self.buf.incr(model, columns, filters, extra={"foo": "bar", "datetime": now})
        result = client.hgetall(key)
        # Force keys to strings
        result = {force_text(k): v for k, v in result.items()}

        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "bar"
        assert result == {"i+times_seen": b"1", "m": b"unittest.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == [key.encode("utf-8")]
        self.buf.incr(model, columns, filters, extra={"foo": "baz", "datetime": now})
        result = client.hgetall(key)
        # Force keys to strings
        result = {force_text(k): v for k, v in result.items()}
        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "baz"
        assert result == {"i+times_seen": b"2", "m": b"unittest.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == [key.encode("utf-8")]

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    @mock.patch("sentry.buffer.redis.process_pending")
    def test_process_pending_partitions_none(self, process_pending, process_incr):
        self.buf.pending_partitions = 2
        with self.buf.cluster.map() as client:
            client.zadd("b:p:0", {"foo": 1})
            client.zadd("b:p:1", {"bar": 1})
            client.zadd("b:p", {"baz": 1})

        # On first pass, we are expecting to do:
        # * process the buffer that doesn't have a partition (b:p)
        # * queue up 2 jobs, one for each partition to process.
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={"batch_keys": ["baz"]})
        assert len(process_pending.apply_async.mock_calls) == 2
        assert process_pending.apply_async.mock_calls == [
            mock.call(kwargs={"partition": 0}),
            mock.call(kwargs={"partition": 1}),
        ]

        # Confirm that we've only processed the unpartitioned buffer
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []
        assert client.zrange("b:p:0", 0, -1) != []
        assert client.zrange("b:p:1", 0, -1) != []

        # partition 0
        self.buf.process_pending(partition=0)
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={"batch_keys": ["foo"]})
        assert client.zrange("b:p:0", 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

        # partition 1
        self.buf.process_pending(partition=1)
        assert len(process_incr.apply_async.mock_calls) == 3
        process_incr.apply_async.assert_any_call(kwargs={"batch_keys": ["bar"]})
        assert client.zrange("b:p:1", 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_uses_signal_only(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            "foo",
            {
                "f": '{"pk": ["i","1"]}',
                "i+times_seen": "1",
                "m": "unittest.mock.Mock",
                "s": "1",
            },
        )
        self.buf.process("foo")
        process.assert_called_once_with(mock.Mock, {"times_seen": 1}, {"pk": 1}, {}, True)
Example 5
 def setUp(self):
     self.buf = RedisBuffer(hosts={
         0: {'db': 9}
     })
Example 6
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == '1'

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u'\u201d') == '”'

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending_one_batch(self, process_incr):
        self.buf.incr_batch_size = 5
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo', 'bar'],
        })
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending_multiple_batches(self, process_incr):
        self.buf.incr_batch_size = 2
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
            client.zadd('b:p', 3, 'baz')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo', 'bar'],
        })
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['baz'],
        })
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_bubble_up_json(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            'foo', {
                'e+foo': '["s","bar"]',
                'e+datetime': '["d","1493791566.000000"]',
                'f': '{"pk": ["i","1"]}',
                'i+times_seen': '2',
                'm': 'sentry.models.Group',
            })
        columns = {'times_seen': 2}
        filters = {'pk': 1}
        extra = {
            'foo': 'bar',
            'datetime': datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        }
        self.buf.process('foo')
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_bubble_up_pickle(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            'foo', {
                'e+foo': "S'bar'\np1\n.",
                'f': "(dp1\nS'pk'\np2\nI1\ns.",
                'i+times_seen': '2',
                'm': 'sentry.models.Group',
            })
        columns = {'times_seen': 2}
        filters = {'pk': 1}
        extra = {'foo': 'bar'}
        self.buf.process('foo')
        process.assert_called_once_with(Group, columns, filters, extra)

    # this test should be passing once we no longer serialize using pickle
    @pytest.mark.xfail
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr', mock.Mock())
    def test_incr_saves_to_redis(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = 'Mock'
        columns = {'times_seen': 1}
        filters = {'pk': 1, 'datetime': now}
        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          'foo': 'bar',
                          'datetime': now
                      })
        result = client.hgetall('foo')
        assert result == {
            'e+foo': '["s","bar"]',
            'e+datetime': '["d","1493791566.000000"]',
            'f': '{"pk":["i","1"],"datetime":["d","1493791566.000000"]}',
            'i+times_seen': '1',
            'm': 'mock.mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']
        self.buf.incr(model, columns, filters, extra={'foo': 'baz'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': '["s","baz"]',
            'e+datetime': '["d","1493791566.000000"]',
            'f': '{"pk":["i","1"],"datetime":["d","1493791566.000000"]}',
            'i+times_seen': '2',
            'm': 'mock.mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    @mock.patch('sentry.buffer.redis.process_pending')
    def test_process_pending_partitions_none(self, process_pending,
                                             process_incr):
        self.buf.pending_partitions = 2
        with self.buf.cluster.map() as client:
            client.zadd('b:p:0', 1, 'foo')
            client.zadd('b:p:1', 1, 'bar')
            client.zadd('b:p', 1, 'baz')

        # On first pass, we are expecting to do:
        # * process the buffer that doesn't have a partition (b:p)
        # * queue up 2 jobs, one for each partition to process.
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['baz'],
        })
        assert len(process_pending.apply_async.mock_calls) == 2
        assert process_pending.apply_async.mock_calls == [
            mock.call(kwargs={'partition': 0}),
            mock.call(kwargs={'partition': 1}),
        ]

        # Confirm that we've only processed the unpartitioned buffer
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []
        assert client.zrange('b:p:0', 0, -1) != []
        assert client.zrange('b:p:1', 0, -1) != []

        # partition 0
        self.buf.process_pending(partition=0)
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo'],
        })
        assert client.zrange('b:p:0', 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

        # partition 1
        self.buf.process_pending(partition=1)
        assert len(process_incr.apply_async.mock_calls) == 3
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['bar'],
        })
        assert client.zrange('b:p:1', 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2
Example 7
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer(hosts={0: {'db': 9}})
        self.buf.conn.flushdb()

    def test_default_host_is_local(self):
        buf = RedisBuffer()
        self.assertEquals(len(buf.conn.hosts), 1)
        self.assertEquals(buf.conn.hosts[0].host, 'localhost')

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == '1'

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u'\u201d') == '”'

    def test_make_key_response(self):
        column = 'times_seen'
        filters = {'pk': 1}
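        # Key shape: '<model label>:<digest of the filters>:<column>'; only the
        # digest portion depends on the filters, so the test pins the exact value.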
        self.assertEquals(
            self.buf._make_key(Group, filters, column),
            'sentry.group:88b48b31b5f100719c64316596b10b0f:times_seen')

    def test_make_extra_key_response(self):
        filters = {'pk': 1}
        self.assertEquals(
            self.buf._make_extra_key(Group, filters),
            'sentry.group:extra:88b48b31b5f100719c64316596b10b0f')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.process_incr')
    def test_incr_delays_task(self, process_incr):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters)
        kwargs = dict(model=model,
                      columns=columns,
                      filters=filters,
                      extra=None)
        process_incr.apply_async.assert_called_once_with(kwargs=kwargs,
                                                         countdown=5)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.process_incr', mock.Mock())
    def test_incr_does_buffer_to_conn(self):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters)
        self.assertEquals(self.buf.conn.get('foo'), '1')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_not_save_empty_results(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.process(Group, columns, filters)
        self.assertFalse(process.called)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_save_call_with_results(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.conn.set('foo', 2)
        self.buf.process(Group, columns, filters)
        process.assert_called_once_with(Group, {'times_seen': 2}, filters,
                                        None)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_clear_buffer(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.conn.set('foo', 2)
        self.buf.process(Group, columns, filters)
        self.assertEquals(self.buf.conn.get('foo'), '0')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.process_incr', mock.Mock())
    def test_incr_does_buffer_extra_to_conn(self):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        self.assertEquals(self.buf.conn.hget('extra', 'foo'),
                          pickle.dumps('bar'))

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key',
                mock.Mock(return_value='extra'))
    def test_process_saves_extra(self):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
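        # Drop microseconds up front; the assertion below compares the stored
        # last_seen at second precision only.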
        the_date = (timezone.now() + timedelta(days=5)).replace(microsecond=0)
        self.buf.conn.set('foo', 1)
        self.buf.conn.hset('extra', 'last_seen', pickle.dumps(the_date))
        self.buf.process(Group, columns, filters)
        group_ = Group.objects.get(pk=group.pk)
        self.assertEquals(group_.last_seen.replace(microsecond=0), the_date)
Example 8
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer(hosts={
            0: {'db': 9}
        })
        self.buf.conn.flushdb()

    def test_default_host_is_local(self):
        buf = RedisBuffer()
        self.assertEquals(len(buf.conn.hosts), 1)
        self.assertEquals(buf.conn.hosts[0].host, 'localhost')

    def test_map_column_handles_foreignkeys(self):
        self.assertEquals(self.buf._map_column(Group, 'project', Project(id=1)), 1)

    def test_make_key_response(self):
        column = 'times_seen'
        filters = {'pk': 1}
        self.assertEquals(self.buf._make_key(Group, filters, column), 'sentry.group:88b48b31b5f100719c64316596b10b0f:times_seen')

    def test_make_extra_key_response(self):
        filters = {'pk': 1}
        self.assertEquals(self.buf._make_extra_key(Group, filters), 'sentry.group:extra:88b48b31b5f100719c64316596b10b0f')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.maybe_delay')
    def test_incr_delays_task(self, maybe_delay):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters)
        maybe_delay.assert_called_once_with(process_incr, model=model, columns=columns, filters=filters, extra=None)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.maybe_delay', mock.Mock())
    def test_incr_does_buffer_to_conn(self):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters)
        self.assertEquals(self.buf.conn.get('foo'), '1')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_not_save_empty_results(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.process(Group, columns, filters)
        self.assertFalse(process.called)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_save_call_with_results(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.conn.set('foo', 2)
        self.buf.process(Group, columns, filters)
        process.assert_called_once_with(Group, {'times_seen': 2}, filters, None)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_clear_buffer(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.conn.set('foo', 2)
        self.buf.process(Group, columns, filters)
        self.assertEquals(self.buf.conn.get('foo'), '0')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.maybe_delay', mock.Mock())
    def test_incr_does_buffer_extra_to_conn(self):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        self.assertEquals(self.buf.conn.hget('extra', 'foo'), pickle.dumps('bar'))

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.RedisBuffer._make_extra_key', mock.Mock(return_value='extra'))
    def test_process_saves_extra(self):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        the_date = datetime.now() + timedelta(days=5)
        self.buf.conn.set('foo', 1)
        self.buf.conn.hset('extra', 'last_seen', pickle.dumps(the_date))
        self.buf.process(Group, columns, filters)
        group_ = Group.objects.get(pk=group.pk)
        self.assertEquals(group_.last_seen, the_date)
Example 9
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer(hosts={0: {'db': 9}})

    def test_default_host_is_local(self):
        buf = RedisBuffer()
        self.assertEquals(len(buf.conn.hosts), 1)
        self.assertEquals(buf.conn.hosts[0].host, 'localhost')

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == '1'

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u'\u201d') == '”'

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending(self, process_incr):
        self.buf.conn.zadd('b:p', 1, 'foo')
        self.buf.conn.zadd('b:p', 2, 'bar')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={'key': 'foo'})
        process_incr.apply_async.assert_any_call(kwargs={'key': 'bar'})
        assert self.buf.conn.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_bubble_up(self, process):
        self.buf.conn.hmset(
            'foo', {
                'e+foo': "S'bar'\np1\n.",
                'f': "(dp1\nS'pk'\np2\nI1\ns.",
                'i+times_seen': '2',
                'm': 'sentry.models.Group',
            })
        columns = {'times_seen': 2}
        filters = {'pk': 1}
        extra = {'foo': 'bar'}
        self.buf.process('foo')
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr', mock.Mock())
    def test_incr_saves_to_redis(self):
        model = mock.Mock()
        model.__name__ = 'Mock'
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = self.buf.conn.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '1',
            'm': 'mock.Mock',
        }
        pending = self.buf.conn.zrange('b:p', 0, -1)
        assert pending == ['foo']
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = self.buf.conn.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '2',
            'm': 'mock.Mock',
        }
        pending = self.buf.conn.zrange('b:p', 0, -1)
        assert pending == ['foo']
Example 10
 def setUp(self):
     self.buf = RedisBuffer(hosts={0: {'db': 9}})
Example 11
 def setUp(self):
     self.buf = RedisBuffer(hosts={0: {"db": 9}})
Example 12
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer(hosts={0: {"db": 9}})

    def test_default_host_is_local(self):
        buf = RedisBuffer()
        self.assertEquals(len(buf.conn.hosts), 1)
        self.assertEquals(buf.conn.hosts[0].host, "localhost")

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == "1"

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u"\u201d") == "”"

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending(self, process_incr):
        self.buf.conn.zadd("b:p", "foo", 1)
        self.buf.conn.zadd("b:p", "bar", 2)
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={"key": "foo"})
        process_incr.apply_async.assert_any_call(kwargs={"key": "bar"})
        assert self.buf.conn.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up(self, process):
        self.buf.conn.hmset(
            "foo",
            {"e+foo": "S'bar'\np1\n.", "f": "(dp1\nS'pk'\np2\nI1\ns.", "i+times_seen": "2", "m": "sentry.models.Group"},
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {"foo": "bar"}
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr", mock.Mock())
    def test_incr_saves_to_redis(self):
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1}
        self.buf.incr(model, columns, filters, extra={"foo": "bar"})
        result = self.buf.conn.hgetall("foo")
        assert result == {
            "e+foo": "S'bar'\np1\n.",
            "f": "(dp1\nS'pk'\np2\nI1\ns.",
            "i+times_seen": "1",
            "m": "mock.Mock",
        }
        pending = self.buf.conn.zrange("b:p", 0, -1)
        assert pending == ["foo"]
        self.buf.incr(model, columns, filters, extra={"foo": "bar"})
        result = self.buf.conn.hgetall("foo")
        assert result == {
            "e+foo": "S'bar'\np1\n.",
            "f": "(dp1\nS'pk'\np2\nI1\ns.",
            "i+times_seen": "2",
            "m": "mock.Mock",
        }
        pending = self.buf.conn.zrange("b:p", 0, -1)
        assert pending == ["foo"]
Example 13
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == '1'

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u'\u201d') == '”'

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending_one_batch(self, process_incr):
        self.buf.incr_batch_size = 5
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo', 'bar'],
        })
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending_multiple_batches(self, process_incr):
        self.buf.incr_batch_size = 2
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
            client.zadd('b:p', 3, 'baz')
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['foo', 'bar'],
        })
        process_incr.apply_async.assert_any_call(kwargs={
            'batch_keys': ['baz'],
        })
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_bubble_up(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            'foo', {
                'e+foo': "S'bar'\np1\n.",
                'f': "(dp1\nS'pk'\np2\nI1\ns.",
                'i+times_seen': '2',
                'm': 'sentry.models.Group',
            })
        columns = {'times_seen': 2}
        filters = {'pk': 1}
        extra = {'foo': 'bar'}
        self.buf.process('foo')
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key',
                mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr', mock.Mock())
    def test_incr_saves_to_redis(self):
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = 'Mock'
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '1',
            'm': 'mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '2',
            'm': 'mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']
Example 14
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == '1'

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u'\u201d') == '”'

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr')
    def test_process_pending(self, process_incr):
        with self.buf.cluster.map() as client:
            client.zadd('b:p', 1, 'foo')
            client.zadd('b:p', 2, 'bar')
        self.buf.process_pending()
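        # Older behaviour: one process_incr task is queued per pending key,
        # rather than the batched 'batch_keys' payload seen in other examples.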
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(kwargs={'key': 'foo'})
        process_incr.apply_async.assert_any_call(kwargs={'key': 'bar'})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange('b:p', 0, -1) == []

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_bubble_up(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset('foo', {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '2',
            'm': 'sentry.models.Group',
        })
        columns = {'times_seen': 2}
        filters = {'pk': 1}
        extra = {'foo': 'bar'}
        self.buf.process('foo')
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.redis.process_incr', mock.Mock())
    def test_incr_saves_to_redis(self):
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = 'Mock'
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '1',
            'm': 'mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']
        self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
        result = client.hgetall('foo')
        assert result == {
            'e+foo': "S'bar'\np1\n.",
            'f': "(dp1\nS'pk'\np2\nI1\ns.",
            'i+times_seen': '2',
            'm': 'mock.Mock',
        }
        pending = client.zrange('b:p', 0, -1)
        assert pending == ['foo']
Example 15
 def setUp(self):
     self.buf = RedisBuffer(hosts={0: {'db': 9}})
     self.buf.conn.flushdb()
Example 16
 def setUp(self):
     self.buf = RedisBuffer()
Example 17
 def test_default_host_is_local(self):
     buf = RedisBuffer()
     self.assertEquals(len(buf.conn.hosts), 1)
     self.assertEquals(buf.conn.hosts[0].host, 'localhost')
Example 18
 def setUp(self):
     self.buf = RedisBuffer(hosts={
         0: {'db': 9}
     })
     self.buf.conn.flushdb()
Example 19
class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer()

    def test_coerce_val_handles_foreignkeys(self):
        assert self.buf._coerce_val(Project(id=1)) == "1"

    def test_coerce_val_handles_unicode(self):
        assert self.buf._coerce_val(u"\u201d") == "”"

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending_one_batch(self, process_incr):
        self.buf.incr_batch_size = 5
        with self.buf.cluster.map() as client:
            client.zadd("b:p", 1, "foo")
            client.zadd("b:p", 2, "bar")
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["foo", "bar"]})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    def test_process_pending_multiple_batches(self, process_incr):
        self.buf.incr_batch_size = 2
        with self.buf.cluster.map() as client:
            client.zadd("b:p", 1, "foo")
            client.zadd("b:p", 2, "bar")
            client.zadd("b:p", 3, "baz")
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["foo", "bar"]})
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["baz"]})
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up_json(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            "foo",
            {
                "e+foo": '["s","bar"]',
                "e+datetime": '["d","1493791566.000000"]',
                "f": '{"pk": ["i","1"]}',
                "i+times_seen": "2",
                "m": "sentry.models.Group",
            },
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {
            "foo": "bar",
            "datetime": datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        }
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra)

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.base.Buffer.process")
    def test_process_does_bubble_up_pickle(self, process):
        client = self.buf.cluster.get_routing_client()
        client.hmset(
            "foo",
            {
                "e+foo": "S'bar'\np1\n.",
                "f": "(dp1\nS'pk'\np2\nI1\ns.",
                "i+times_seen": "2",
                "m": "sentry.models.Group",
            },
        )
        columns = {"times_seen": 2}
        filters = {"pk": 1}
        extra = {"foo": "bar"}
        self.buf.process("foo")
        process.assert_called_once_with(Group, columns, filters, extra)

    # this test should be passing once we no longer serialize using pickle
    @pytest.mark.xfail
    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr", mock.Mock())
    def test_incr_saves_to_redis(self):
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}
        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          "foo": "bar",
                          "datetime": now
                      })
        result = client.hgetall("foo")
        assert result == {
            "e+foo": '["s","bar"]',
            "e+datetime": '["d","1493791566.000000"]',
            "f": '{"pk":["i","1"],"datetime":["d","1493791566.000000"]}',
            "i+times_seen": "1",
            "m": "mock.mock.Mock",
        }
        pending = client.zrange("b:p", 0, -1)
        assert pending == ["foo"]
        self.buf.incr(model, columns, filters, extra={"foo": "baz"})
        result = client.hgetall("foo")
        assert result == {
            "e+foo": '["s","baz"]',
            "e+datetime": '["d","1493791566.000000"]',
            "f": '{"pk":["i","1"],"datetime":["d","1493791566.000000"]}',
            "i+times_seen": "2",
            "m": "mock.mock.Mock",
        }
        pending = client.zrange("b:p", 0, -1)
        assert pending == ["foo"]

    @mock.patch("sentry.buffer.redis.RedisBuffer._make_key",
                mock.Mock(return_value="foo"))
    @mock.patch("sentry.buffer.redis.process_incr")
    @mock.patch("sentry.buffer.redis.process_pending")
    def test_process_pending_partitions_none(self, process_pending,
                                             process_incr):
        self.buf.pending_partitions = 2
        with self.buf.cluster.map() as client:
            client.zadd("b:p:0", 1, "foo")
            client.zadd("b:p:1", 1, "bar")
            client.zadd("b:p", 1, "baz")

        # On first pass, we are expecting to do:
        # * process the buffer that doesn't have a partition (b:p)
        # * queue up 2 jobs, one for each partition to process.
        self.buf.process_pending()
        assert len(process_incr.apply_async.mock_calls) == 1
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["baz"]})
        assert len(process_pending.apply_async.mock_calls) == 2
        assert process_pending.apply_async.mock_calls == [
            mock.call(kwargs={"partition": 0}),
            mock.call(kwargs={"partition": 1}),
        ]

        # Confirm that we've only processed the unpartitioned buffer
        client = self.buf.cluster.get_routing_client()
        assert client.zrange("b:p", 0, -1) == []
        assert client.zrange("b:p:0", 0, -1) != []
        assert client.zrange("b:p:1", 0, -1) != []

        # partition 0
        self.buf.process_pending(partition=0)
        assert len(process_incr.apply_async.mock_calls) == 2
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["foo"]})
        assert client.zrange("b:p:0", 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2

        # partition 1
        self.buf.process_pending(partition=1)
        assert len(process_incr.apply_async.mock_calls) == 3
        process_incr.apply_async.assert_any_call(
            kwargs={"batch_keys": ["bar"]})
        assert client.zrange("b:p:1", 0, -1) == []

        # Make sure we didn't queue up more
        assert len(process_pending.apply_async.mock_calls) == 2