Example #1
def incr(self, model, columns, filters, extra=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
        - Add hashmap key to pending flushes
        """
        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        pending_key = self._make_pending_key_from_key(key)
        # We can't use conn.map() because we want to support multiple pending
        # keys (one per Redis partition)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, 'm', '%s.%s' % (model.__module__, model.__name__))
        pipe.hsetnx(key, 'f', pickle.dumps(filters))
        for column, amount in six.iteritems(columns):
            pipe.hincrby(key, 'i+' + column, amount)

        if extra:
            for column, value in six.iteritems(extra):
                pipe.hset(key, 'e+' + column, pickle.dumps(value))
        pipe.expire(key, self.key_expire)
        pipe.zadd(pending_key, time(), key)
        pipe.execute()
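
To make the hash layout concrete, here is a minimal standalone sketch of what this pipeline writes, using a plain redis-py (3.x) client instead of Sentry's cluster; the key names, model path, and TTL are illustrative stand-ins, not values the real _make_key produces:

import pickle
from time import time

import redis

conn = redis.StrictRedis()
key = 'b:k:2601296b2e1835ba'  # illustrative; real keys come from _make_key
pending_key = 'b:p'           # illustrative pending-flush sorted set

pipe = conn.pipeline()
pipe.hsetnx(key, 'm', 'sentry.models.Group')          # model path, written once
pipe.hsetnx(key, 'f', pickle.dumps({'pk': 1}))        # pickled filters, written once
pipe.hincrby(key, 'i+times_seen', 1)                  # 'i+' prefix marks counters
pipe.hset(key, 'e+last_seen', pickle.dumps('2020'))   # 'e+' prefix marks extras; last write wins
pipe.expire(key, 3600)
pipe.zadd(pending_key, {key: time()})                 # redis-py >= 3 mapping form
pipe.execute()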
Example #2
File: redis.py Project: daevaorn/sentry
    def incr(self, model, columns, filters, extra=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
        - Add hashmap key to pending flushes
        """
        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        # We can't use conn.map() because we want to support multiple pending
        # keys (one per Redis shard)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, 'm', '%s.%s' % (model.__module__, model.__name__))
        pipe.hsetnx(key, 'f', pickle.dumps(filters))
        for column, amount in columns.iteritems():
            pipe.hincrby(key, 'i+' + column, amount)

        if extra:
            for column, value in extra.iteritems():
                pipe.hset(key, 'e+' + column, pickle.dumps(value))
        pipe.expire(key, self.key_expire)
        pipe.zadd(self.pending_key, time(), key)
        pipe.execute()
Example #3
File: redis.py Project: tuanictu97/sentry
    def incr(self, model, columns, filters, extra=None, signal_only=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
            - Perform a set on signal_only (only if True)
        - Add hashmap key to pending flushes
        """

        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        pending_key = self._make_pending_key_from_key(key)
        # We can't use conn.map() because we want to support multiple pending
        # keys (one per Redis partition)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, "m", "%s.%s" % (model.__module__, model.__name__))
        # TODO(dcramer): once this goes live in production, we can kill the pickle path
        # (this is to ensure a zero downtime deploy where we can transition event processing)
        pipe.hsetnx(key, "f", pickle.dumps(filters))
        # pipe.hsetnx(key, 'f', json.dumps(self._dump_values(filters)))
        for column, amount in six.iteritems(columns):
            pipe.hincrby(key, "i+" + column, amount)

        if extra:
            # Group tries to serialize 'score', so we'd need some kind of processing
            # hook here
            # e.g. "update score if last_seen or times_seen is changed"
            for column, value in six.iteritems(extra):
                # TODO(dcramer): once this goes live in production, we can kill the pickle path
                # (this is to ensure a zero downtime deploy where we can transition event processing)
                pipe.hset(key, "e+" + column, pickle.dumps(value))
                # pipe.hset(key, 'e+' + column, json.dumps(self._dump_value(value)))

        if signal_only is True:
            pipe.hset(key, "s", "1")

        pipe.expire(key, self.key_expire)
        pipe.zadd(pending_key, {key: time()})
        pipe.execute()

        metrics.incr(
            "buffer.incr",
            skip_internal=True,
            tags={
                "module": model.__module__,
                "model": model.__name__
            },
        )
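
The 'i+', 'e+', and 's' field prefixes form a small wire format, so whatever flushes the buffer has to split a raw HGETALL result back apart. A hedged sketch of such a decoder (an illustration of the layout above, not Sentry's actual process code):

import pickle

def decode_buffer_hash(values):
    """Split a raw HGETALL result into counters, extras, and the signal flag."""
    counters, extra, signal_only = {}, {}, False
    for field, value in values.items():
        if field.startswith(b'i+'):
            counters[field[2:].decode('utf-8')] = int(value)
        elif field.startswith(b'e+'):
            extra[field[2:].decode('utf-8')] = pickle.loads(value)
        elif field == b's':
            signal_only = value == b'1'
    return counters, extra, signal_only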
Example #4
 def test_incr_does_buffer_extra_to_conn(self):
     model = mock.Mock()
     columns = {'times_seen': 1}
     filters = {'pk': 1}
     self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
     self.assertEquals(self.buf.conn.hget('extra', 'foo'),
                       pickle.dumps('bar'))
Example #5
 def _postWithKey(self, data):
     resp = self.client.post(
         reverse('sentry-store'), {
             'data': base64.b64encode(pickle.dumps(transform(data))),
             'key': settings.KEY,
         })
     return resp
Example #6
 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     # enforce unicode strings to guarantee consistency
     if isinstance(value, str):
         value = unicode(value)
     return compress(pickle.dumps(value))
Example #7
 def test_incr_does_buffer_extra_to_conn(self):
     model = mock.Mock()
     columns = {'times_seen': 1}
     filters = {'pk': 1}
     self.buffer.incr(model, columns, filters, extra={'foo': 'bar'})
     response = self.buffer._tnt.select(self.sentry_extra_space,
                                        [('extra', 'foo')])
     self.assertEquals(response[0][2], pickle.dumps('bar'))
Example #8
 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     # enforce unicode strings to guarantee consistency
     if isinstance(value, str):
         value = six.text_type(value)
     return compress(pickle.dumps(value))
Example #9
 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     # enforce six.text_type strings to guarantee consistency
     if isinstance(value, six.binary_type):
         value = six.text_type(value)
     # db values need to be in unicode
     return compress(pickle.dumps(value))
Example #10
File: tests.py Project: clvrobj/sentry
 def test_process_saves_extra(self):
     group = Group.objects.create(project=Project(id=1))
     columns = {'times_seen': 1}
     filters = {'pk': group.pk}
     the_date = datetime.now() + timedelta(days=5)
     self.buf.conn.set('foo', 1)
     self.buf.conn.hset('extra', 'last_seen', pickle.dumps(the_date))
     self.buf.process(Group, columns, filters)
     group_ = Group.objects.get(pk=group.pk)
     self.assertEquals(group_.last_seen, the_date)
Example #11
 def test_process_saves_extra(self):
     group = Group.objects.create(project=Project(id=1))
     columns = {'times_seen': 1}
     filters = {'pk': group.pk}
     the_date = (timezone.now() + timedelta(days=5)).replace(microsecond=0)
     self.buf.conn.set('foo', 1)
     self.buf.conn.hset('extra', 'last_seen', pickle.dumps(the_date))
     self.buf.process(Group, columns, filters)
     group_ = Group.objects.get(pk=group.pk)
     self.assertEquals(group_.last_seen.replace(microsecond=0), the_date)
Example #12
File: redis.py Project: alexandrul/sentry
    def incr(self, model, columns, filters, extra=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
        - Add hashmap key to pending flushes
        """
        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        pending_key = self._make_pending_key_from_key(key)
        # We can't use conn.map() because we want to support multiple pending
        # keys (one per Redis partition)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, 'm', '%s.%s' % (model.__module__, model.__name__))
        # TODO(dcramer): once this goes live in production, we can kill the pickle path
        # (this is to ensure a zero downtime deploy where we can transition event processing)
        pipe.hsetnx(key, 'f', pickle.dumps(filters))
        # pipe.hsetnx(key, 'f', json.dumps(self._dump_values(filters)))
        for column, amount in six.iteritems(columns):
            pipe.hincrby(key, 'i+' + column, amount)

        if extra:
            # Group tries to serialize 'score', so we'd need some kind of processing
            # hook here
            # e.g. "update score if last_seen or times_seen is changed"
            for column, value in six.iteritems(extra):
                # TODO(dcramer): once this goes live in production, we can kill the pickle path
                # (this is to ensure a zero downtime deploy where we can transition event processing)
                pipe.hset(key, 'e+' + column, pickle.dumps(value))
                # pipe.hset(key, 'e+' + column, json.dumps(self._dump_value(value)))
        pipe.expire(key, self.key_expire)
        pipe.zadd(pending_key, time(), key)
        pipe.execute()

        metrics.incr('buffer.incr', skip_internal=True, tags={
            'module': model.__module__,
            'model': model.__name__,
        })
Example #13
    def test_handles_gettext_lazy(self):
        def fake_gettext(to_translate):
            return u'Igpay Atinlay'

        fake_gettext_lazy = lazy(fake_gettext, str)

        self.assertEquals(
            pickle.loads(pickle.dumps(
                    transform(fake_gettext_lazy("something")))),
            u'Igpay Atinlay')
Example #14
File: tests.py Project: Kayle009/sentry
    def test_event_node_id(self):
        # Create an event without specifying node_id. A node_id should be generated
        e1 = Event(project_id=1, event_id='abc', data={'foo': 'bar'})
        e1.save()
        e1_node_id = e1.data.id
        assert e1.data.id is not None, "We should have generated a node_id for this event"
        e1_body = nodestore.get(e1_node_id)
        assert e1_body == {'foo': 'bar'}, "The event body should be in nodestore"

        e1 = Event.objects.get(project_id=1, event_id='abc')
        assert e1.data.data == {'foo': 'bar'}, "The event body should be loaded from nodestore"
        assert e1.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create another event that references the same nodestore object as the first event.
        e2 = Event(project_id=1, event_id='def', data={'node_id': e1_node_id})
        assert e2.data.id == e1_node_id, "The event should use the provided node_id"
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {'foo': 'bar'}, "The event body should be in nodestore already"
        e2.save()
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {'foo': 'bar'}, "The event body should not be overwritten by save"

        e2 = Event.objects.get(project_id=1, event_id='def')
        assert e2.data.data == {'foo': 'bar'}, "The event body should be loaded from nodestore"
        assert e2.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create an event with a new event body that specifies the node_id to use.
        e3 = Event(project_id=1, event_id='ghi', data={'baz': 'quux', 'node_id': '1:ghi'})
        assert e3.data.id == '1:ghi', "Event should have the specified node_id"
        assert e3.data.data == {'baz': 'quux'}, "Event body should be the one provided (sans node_id)"
        e3.save()
        e3_body = nodestore.get('1:ghi')
        assert e3_body == {'baz': 'quux'}, "Event body should be saved to nodestore"

        e3 = Event.objects.get(project_id=1, event_id='ghi')
        assert e3.data.data == {'baz': 'quux'}, "Event body should be loaded from nodestore"
        assert e3.data.id == '1:ghi', "Loaded event should have the correct node_id"

        # Try to load it again, using the pickled/compressed string we would
        # expect to find in the column
        e3_pickled_id = compress(pickle.dumps({'node_id': '1:ghi'}))
        e3 = Event(project_id=1, event_id='jkl', data=e3_pickled_id)
        assert e3.data.data == {'baz': 'quux'}, "Event body should be loaded from nodestore"

        # Event with no data should not be saved (or loaded) from nodestore
        e4 = Event(project_id=1, event_id='mno', data=None)
        e4.save()
        assert nodestore.get('1:mno') is None, "We should not have saved anything to nodestore"
        e4 = Event.objects.get(project_id=1, event_id='mno')
        assert e4.data.id is None
        assert e4.data.data == {}  # NodeData returns {} by default
        Event.objects.bind_nodes([e4], 'data')
        assert e4.data.id is None
        assert e4.data.data == {}
Example #15
    def incr(self, model, columns, filters, extra=None):
        for column, amount in columns.iteritems():
            key = self._make_key(model, filters, column)
            call_args = (key, str(amount), str(self.key_expire))
            self._tnt.call('box.sentry_buffer.incr', call_args)

        if extra:
            key = self._make_extra_key(model, filters)
            for column, value in extra.iteritems():
                call_args = (key, column, pickle.dumps(value),
                             str(self.key_expire))
                self._tnt.call('box.sentry_buffer.hset', call_args)
        super(TarantoolBuffer, self).incr(model, columns, filters, extra)
Example #16
    def get_prep_value(self, value):
        if not value and self.null:
            # save ourselves some storage
            return None

        # TODO(dcramer): we should probably do this more intelligently
        # and manually
        if not value.id:
            value.id = nodestore.create(value.data)
        else:
            nodestore.set(value.id, value.data)

        return compress(pickle.dumps({'node_id': value.id}))
Example #17
    def get_prep_value(self, value):
        if not value and self.null:
            # save ourselves some storage
            return None

        # TODO(dcramer): we should probably do this more intelligently
        # and manually
        if not value.id:
            value.id = nodestore.create(value.data)
        else:
            nodestore.set(value.id, value.data)

        return compress(pickle.dumps({'node_id': value.id}))
Example #18
    def incr(self, model, columns, filters, extra=None):
        with self.conn.map() as conn:
            for column, amount in columns.iteritems():
                key = self._make_key(model, filters, column)
                conn.incr(key, amount)
                conn.expire(key, self.key_expire)

            # Store extra in a hashmap so it can easily be removed
            if extra:
                key = self._make_extra_key(model, filters)
                for column, value in extra.iteritems():
                    conn.hset(key, column, pickle.dumps(value))
                    conn.expire(key, self.key_expire)
        super(RedisBuffer, self).incr(model, columns, filters, extra)
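
This older layout needs one deterministic key per (model, filters, column) tuple. A hedged sketch of what such a key scheme could look like (make_key here is illustrative; the real _make_key may differ):

import hashlib

def make_key(model, filters, column):
    """Derive a stable per-column counter key from the model and its filters."""
    filter_part = '&'.join('%s=%s' % item for item in sorted(filters.items()))
    digest = hashlib.md5(filter_part.encode('utf-8')).hexdigest()
    return 'sentry.buffer:%s:%s:%s' % (model.__name__, digest, column)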
Example #19
File: redis.py Project: raptium/sentry
    def incr(self, model, columns, filters, extra=None):
        with self.conn.map() as conn:
            for column, amount in columns.iteritems():
                key = self._make_key(model, filters, column)
                conn.incr(key, amount)
                conn.expire(key, self.key_expire)

            # Store extra in a hashmap so it can easily be removed
            if extra:
                key = self._make_extra_key(model, filters)
                for column, value in extra.iteritems():
                    conn.hset(key, column, pickle.dumps(value))
                    conn.expire(key, self.key_expire)
        super(RedisBuffer, self).incr(model, columns, filters, extra)
Example #20
    def test_does_transition_data_to_node(self):
        group = self.group
        data = {'key': 'value'}

        query_bits = [
            "INSERT INTO sentry_message (group_id, project_id, data, message, datetime)",
            "VALUES(%s, %s, %s, %s, %s)",
        ]
        params = [
            group.id, group.project_id,
            compress(pickle.dumps(data)), 'test',
            timezone.now()
        ]

        # This is pulled from SQLInsertCompiler
        if connection.features.can_return_id_from_insert:
            r_fmt, r_params = connection.ops.return_insert_id()
            if r_fmt:
                query_bits.append(r_fmt % Event._meta.pk.column)
                params += r_params

        cursor = connection.cursor()
        cursor.execute(' '.join(query_bits), params)

        if connection.features.can_return_id_from_insert:
            event_id = connection.ops.fetch_returned_insert_id(cursor)
        else:
            event_id = connection.ops.last_insert_id(cursor,
                                                     Event._meta.db_table,
                                                     Event._meta.pk.column)

        event = Event.objects.get(id=event_id)
        assert type(event.data) == NodeData
        assert event.data == data
        assert event.data.id is None

        event.save()

        assert event.data == data
        assert event.data.id is not None

        node_id = event.data.id
        event = Event.objects.get(id=event_id)

        Event.objects.bind_nodes([event], 'data')

        assert event.data == data
        assert event.data.id == node_id
Example #21
    def test_process_saves_extra(self, process):
        group = Group(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        the_date = (timezone.now() + timedelta(days=5)).replace(microsecond=0)
        self.buffer._tnt.insert(self.sentry_space, ('foo', 1, 0L))
        self.buffer._tnt.insert(
            self.sentry_extra_space,
            ('extra', 'last_seen', pickle.dumps(the_date), 0L))
        self.buffer.process(Group, columns, filters)
        process.assert_called_once_with(Group, columns, filters,
                                        {'last_seen': the_date})

        lua_code = 'return box.space[%s]:len()' % (self.sentry_extra_space, )
        response = self.buffer._tnt.call('box.dostring', lua_code)
        self.assertEqual(0, int(response[0][0]))
Example #22
File: node.py Project: getsentry/sentry
    def get_prep_value(self, value):
        """
            Prepares the NodeData to be written in a Model.save() call.

            Makes sure the event body is written to nodestore and
            returns the node_id reference to be written to rowstore.
        """
        if not value and self.null:
            # save ourselves some storage
            return None

        if value.id is None:
            value.id = self.id_func()

        value.save()
        return compress(pickle.dumps({'node_id': value.id}))
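
The read path is roughly the inverse: decompress the column, unpickle the {'node_id': ...} wrapper, and fetch the body from nodestore. A sketch assuming compress/decompress are zlib-based (load_node_reference is illustrative, not the field's actual to_python):

import pickle
import zlib

def load_node_reference(column_value, nodestore):
    """Resolve a rowstore column back into (node_id, event body)."""
    wrapper = pickle.loads(zlib.decompress(column_value))
    node_id = wrapper['node_id']
    return node_id, nodestore.get(node_id)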
Example #23
File: node.py Project: webZW/sentry
    def get_prep_value(self, value):
        """
            Prepares the NodeData to be written in a Model.save() call.

            Makes sure the event body is written to nodestore and
            returns the node_id reference to be written to rowstore.
        """
        if not value and self.null:
            # save ourselves some storage
            return None

        if value.id is None:
            value.id = self.id_func()

        value.save()
        return compress(pickle.dumps({'node_id': value.id}))
Example #24
File: tests.py Project: alexandrul/sentry
    def test_does_transition_data_to_node(self):
        group = self.group
        data = {'key': 'value'}

        query_bits = [
            "INSERT INTO sentry_message (group_id, project_id, data, message, datetime)",
            "VALUES(%s, %s, %s, %s, %s)",
        ]
        params = [group.id, group.project_id, compress(pickle.dumps(data)), 'test', timezone.now()]

        # This is pulled from SQLInsertCompiler
        if connection.features.can_return_id_from_insert:
            r_fmt, r_params = connection.ops.return_insert_id()
            if r_fmt:
                query_bits.append(r_fmt % Event._meta.pk.column)
                params += r_params

        cursor = connection.cursor()
        cursor.execute(' '.join(query_bits), params)

        if connection.features.can_return_id_from_insert:
            event_id = connection.ops.fetch_returned_insert_id(cursor)
        else:
            event_id = connection.ops.last_insert_id(
                cursor, Event._meta.db_table, Event._meta.pk.column
            )

        event = Event.objects.get(id=event_id)
        assert type(event.data) == NodeData
        assert event.data == data
        assert event.data.id is None

        event.save()

        assert event.data == data
        assert event.data.id is not None

        node_id = event.data.id
        event = Event.objects.get(id=event_id)

        Event.objects.bind_nodes([event], 'data')

        assert event.data == data
        assert event.data.id == node_id
Example #25
    def get_prep_value(self, value):
        if not value and self.null:
            # save ourselves some storage
            return None

        # We can't put our wrappers into the nodestore, so we need to
        # ensure that the data is converted into a plain old dict
        data = value.data
        if isinstance(data, CANONICAL_TYPES):
            data = dict(data.items())

        # TODO(dcramer): we should probably do this more intelligently
        # and manually
        if not value.id:
            value.id = nodestore.create(data)
        else:
            nodestore.set(value.id, data)

        return compress(pickle.dumps({'node_id': value.id}))
Example #26
    def test_pickling_compat(self):
        event = self.store_event(
            data={
                "message": "Hello World!",
                "tags": {
                    "logger": "foobar",
                    "site": "foo",
                    "server_name": "bar"
                },
            },
            project_id=self.project.id,
        )

        # Ensure we load and memoize the interfaces as well.
        assert len(event.interfaces) > 0

        # When we pickle an event we need to make sure our canonical code
        # does not appear here or it breaks old workers.
        data = pickle.dumps(event, protocol=2)
        assert b"canonical" not in data

        # For testing we remove the backwards compat support in the
        # `NodeData` as well.
        nodedata_getstate = NodeData.__getstate__
        del NodeData.__getstate__

        # Old worker loading
        try:
            event2 = pickle.loads(data)
            assert event2.data == event.data
        finally:
            NodeData.__getstate__ = nodedata_getstate

        # New worker loading
        event2 = pickle.loads(data)
        assert event2.data == event.data
Example #27
 def get_prep_value(self, value):
     if value is None:
         return
     return base64.b64encode(pickle.dumps(transform(value)).encode("zlib"))
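
Note that str.encode('zlib') only exists in Python 2. Decoding such a column in modern Python would look roughly like this (decode_prep_value is an illustrative inverse, assuming the base64 + zlib + pickle layering above):

import base64
import pickle
import zlib

def decode_prep_value(stored):
    """Invert b64encode(zlib(pickle(value))) as written by get_prep_value."""
    return pickle.loads(zlib.decompress(base64.b64decode(stored)))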
Example #28
def test_pickle():
    for cls in [TagKey, TagValue, GroupTagKey, GroupTagValue]:
        value = cls(**{name: 1 for name in cls.__slots__})
        assert pickle.loads(pickle.dumps(value)) == value
Example #29
File: models.py Project: Supy/sentry
 def get_prep_value(self, value):
     if value is None:
         return
     return base64.b64encode(pickle.dumps(value).encode('zlib'))
Example #30
    def test_event_node_id(self):
        # Create an event without specifying node_id. A node_id should be generated
        e1 = Event(project_id=1, event_id="abc", data={"foo": "bar"})
        e1.save()
        e1_node_id = e1.data.id
        assert e1.data.id is not None, "We should have generated a node_id for this event"
        e1_body = nodestore.get(e1_node_id)
        e1.data.save()
        e1_body = nodestore.get(e1_node_id)
        assert e1_body == {
            "foo": "bar"
        }, "The event body should be in nodestore"

        e1 = Event.objects.get(project_id=1, event_id="abc")
        assert e1.data.data == {
            "foo": "bar"
        }, "The event body should be loaded from nodestore"
        assert e1.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create another event that references the same nodestore object as the first event.
        e2 = Event(project_id=1, event_id="def", data={"node_id": e1_node_id})
        assert e2.data.id == e1_node_id, "The event should use the provided node_id"
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {
            "foo": "bar"
        }, "The event body should be in nodestore already"
        e2.save()
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {
            "foo": "bar"
        }, "The event body should not be overwritten by save"

        e2 = Event.objects.get(project_id=1, event_id="def")
        assert e2.data.data == {
            "foo": "bar"
        }, "The event body should be loaded from nodestore"
        assert e2.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create an event with a new event body that specifies the node_id to use.
        e3 = Event(project_id=1,
                   event_id="ghi",
                   data={
                       "baz": "quux",
                       "node_id": "1:ghi"
                   })
        assert e3.data.id == "1:ghi", "Event should have the specified node_id"
        assert e3.data.data == {
            "baz": "quux"
        }, "Event body should be the one provided (sans node_id)"
        e3.save()
        e3_body = nodestore.get("1:ghi")
        e3.data.save()
        e3_body = nodestore.get("1:ghi")
        assert e3_body == {
            "baz": "quux"
        }, "Event body should be saved to nodestore"

        e3 = Event.objects.get(project_id=1, event_id="ghi")
        assert e3.data.data == {
            "baz": "quux"
        }, "Event body should be loaded from nodestore"
        assert e3.data.id == "1:ghi", "Loaded event should have the correct node_id"

        # Try to load it again, using the pickled/compressed string we would
        # expect to find in the column
        e3_pickled_id = compress(pickle.dumps({"node_id": "1:ghi"}))
        e3 = Event(project_id=1, event_id="jkl", data=e3_pickled_id)
        assert e3.data.data == {
            "baz": "quux"
        }, "Event body should be loaded from nodestore"

        # Event with no data should not be saved (or loaded) from nodestore
        e4 = Event(project_id=1, event_id="mno", data=None)
        e4.save()
        e4.data.save()
        assert nodestore.get(
            "1:mno") is None, "We should not have saved anything to nodestore"
        e4 = Event.objects.get(project_id=1, event_id="mno")
        assert e4.data.id is None
        assert e4.data.data == {}  # NodeData returns {} by default
        e4.bind_node_data()
        assert e4.data.id is None
        assert e4.data.data == {}
Example #31
 def encode(self, value):
     return zlib.compress(pickle.dumps(value))
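
A codec like this presumably pairs encode with its inverse; a minimal sketch (PickleCodec is an illustrative name, not the project's class):

import pickle
import zlib

class PickleCodec(object):
    def encode(self, value):
        return zlib.compress(pickle.dumps(value))

    def decode(self, value):
        # Only safe for trusted data: pickle.loads executes arbitrary code.
        return pickle.loads(zlib.decompress(value))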
Example #32
    def test_event_node_id(self):
        # Create an event without specifying node_id. A node_id should be generated
        e1 = Event(project_id=1, event_id='abc', data={'foo': 'bar'})
        e1.save()
        e1_node_id = e1.data.id
        assert e1.data.id is not None, "We should have generated a node_id for this event"
        e1_body = nodestore.get(e1_node_id)
        assert e1_body == {
            'foo': 'bar'
        }, "The event body should be in nodestore"

        e1 = Event.objects.get(project_id=1, event_id='abc')
        assert e1.data.data == {
            'foo': 'bar'
        }, "The event body should be loaded from nodestore"
        assert e1.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create another event that references the same nodestore object as the first event.
        e2 = Event(project_id=1, event_id='def', data={'node_id': e1_node_id})
        assert e2.data.id == e1_node_id, "The event should use the provided node_id"
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {
            'foo': 'bar'
        }, "The event body should be in nodestore already"
        e2.save()
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {
            'foo': 'bar'
        }, "The event body should not be overwritten by save"

        e2 = Event.objects.get(project_id=1, event_id='def')
        assert e2.data.data == {
            'foo': 'bar'
        }, "The event body should be loaded from nodestore"
        assert e2.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create an event with a new event body that specifies the node_id to use.
        e3 = Event(project_id=1,
                   event_id='ghi',
                   data={
                       'baz': 'quux',
                       'node_id': '1:ghi'
                   })
        assert e3.data.id == '1:ghi', "Event should have the specified node_id"
        assert e3.data.data == {
            'baz': 'quux'
        }, "Event body should be the one provided (sans node_id)"
        e3.save()
        e3_body = nodestore.get('1:ghi')
        assert e3_body == {
            'baz': 'quux'
        }, "Event body should be saved to nodestore"

        e3 = Event.objects.get(project_id=1, event_id='ghi')
        assert e3.data.data == {
            'baz': 'quux'
        }, "Event body should be loaded from nodestore"
        assert e3.data.id == '1:ghi', "Loaded event should have the correct node_id"

        # Try to load it again, using the pickled/compressed string we would
        # expect to find in the column
        e3_pickled_id = compress(pickle.dumps({'node_id': '1:ghi'}))
        e3 = Event(project_id=1, event_id='jkl', data=e3_pickled_id)
        assert e3.data.data == {
            'baz': 'quux'
        }, "Event body should be loaded from nodestore"

        # Event with no data should not be saved (or loaded) from nodestore
        e4 = Event(project_id=1, event_id='mno', data=None)
        e4.save()
        assert nodestore.get(
            '1:mno') is None, "We should not have saved anything to nodestore"
        e4 = Event.objects.get(project_id=1, event_id='mno')
        assert e4.data.id is None
        assert e4.data.data == {}  # NodeData returns {} by default
        Event.objects.bind_nodes([e4], 'data')
        assert e4.data.id is None
        assert e4.data.data == {}
Example #33
File: base.py Project: Crowdbooster/sentry
 def _postWithKey(self, data, key=None):
     resp = self.client.post(reverse('sentry-api-store'), {
         'data': base64.b64encode(pickle.dumps(data)),
         'key': settings.KEY,
     })
     return resp
Example #34
class TestDjangoNodeStorage:
    def setup_method(self):
        self.ns = DjangoNodeStorage()

    @pytest.mark.parametrize(
        "node_data",
        [
            compress(b'{"foo": "bar"}'),
            compress(pickle.dumps({"foo": "bar"})),
            # hardcoded pickle value from python 3.6
            compress(
                b"\x80\x03}q\x00X\x03\x00\x00\x00fooq\x01X\x03\x00\x00\x00barq\x02s."
            ),
            # hardcoded pickle value from python 2.7
            compress(b"(dp0\nS'foo'\np1\nS'bar'\np2\ns."),
        ],
    )
    def test_get(self, node_data):
        node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33",
                                   data=node_data)

        result = self.ns.get(node.id)
        assert result == {"foo": "bar"}

    def test_get_multi(self):
        Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33",
                            data=compress(b'{"foo": "bar"}'))
        Node.objects.create(id="5394aa025b8e401ca6bc3ddee3130edc",
                            data=compress(b'{"foo": "baz"}'))

        result = self.ns.get_multi([
            "d2502ebbd7df41ceba8d3275595cac33",
            "5394aa025b8e401ca6bc3ddee3130edc"
        ])
        assert result == {
            "d2502ebbd7df41ceba8d3275595cac33": {
                "foo": "bar"
            },
            "5394aa025b8e401ca6bc3ddee3130edc": {
                "foo": "baz"
            },
        }

    def test_set(self):
        self.ns.set("d2502ebbd7df41ceba8d3275595cac33", {"foo": "bar"})
        assert Node.objects.get(id="d2502ebbd7df41ceba8d3275595cac33"
                                ).data == compress(b'{"foo":"bar"}')

    def test_delete(self):
        node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33",
                                   data=b'{"foo": "bar"}')

        self.ns.delete(node.id)
        assert not Node.objects.filter(id=node.id).exists()

    def test_delete_multi(self):
        node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33",
                                   data=b'{"foo": "bar"}')

        self.ns.delete_multi([node.id])
        assert not Node.objects.filter(id=node.id).exists()

    def test_cleanup(self):
        now = timezone.now()
        cutoff = now - timedelta(days=1)

        node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33",
                                   timestamp=now,
                                   data=b'{"foo": "bar"}')

        node2 = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac34",
                                    timestamp=cutoff,
                                    data=b'{"foo": "bar"}')

        self.ns.cleanup(cutoff)

        assert Node.objects.filter(id=node.id).exists()
        assert not Node.objects.filter(id=node2.id).exists()

    def test_cache(self):
        node_1 = ("a" * 32, {"foo": "a"})
        node_2 = ("b" * 32, {"foo": "b"})
        node_3 = ("c" * 32, {"foo": "c"})

        for node_id, data in [node_1, node_2, node_3]:
            Node.objects.create(id=node_id,
                                data=compress(json_dumps(data).encode("utf8")))

        # Get / get multi populates cache
        assert self.ns.get(node_1[0]) == node_1[1]
        assert self.ns.get_multi([node_2[0], node_3[0]]) == {
            node_2[0]: node_2[1],
            node_3[0]: node_3[1],
        }
        with mock.patch.object(Node.objects, "get") as mock_get:
            assert self.ns.get(node_1[0]) == node_1[1]
            assert self.ns.get(node_2[0]) == node_2[1]
            assert self.ns.get(node_3[0]) == node_3[1]
            assert mock_get.call_count == 0

        with mock.patch.object(Node.objects, "filter") as mock_filter:
            assert self.ns.get_multi([node_1[0], node_2[0], node_3[0]])
            assert mock_filter.call_count == 0

        # A manually deleted item should still be retrievable from cache
        Node.objects.get(id=node_1[0]).delete()
        assert self.ns.get(node_1[0]) == node_1[1]
        assert self.ns.get_multi([node_1[0], node_2[0]]) == {
            node_1[0]: node_1[1],
            node_2[0]: node_2[1],
        }

        # Deletion clears the cache
        self.ns.delete(node_1[0])
        assert self.ns.get_multi([node_1[0], node_2[0]]) == {
            node_2[0]: node_2[1]
        }
        self.ns.delete_multi([node_1[0], node_2[0]])
        assert self.ns.get_multi([node_1[0], node_2[0]]) == {}

        # Setting the item updates cache
        new_value = {"event_id": "d" * 32}
        self.ns.set(node_1[0], new_value)
        with mock.patch.object(Node.objects, "get") as mock_get:
            assert self.ns.get(node_1[0]) == new_value
            assert mock_get.call_count == 0

        # Missing rows are never cached
        assert self.ns.get("node_4") is None
        with mock.patch.object(Node.objects, "get") as mock_get:
            mock_get.side_effect = Node.DoesNotExist
            self.ns.get("node_4")
            self.ns.get("node_4")
            assert mock_get.call_count == 2
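
The parametrized test_get above implies the storage decodes both JSON bodies and legacy pickles (including hardcoded Python 2 and 3 pickle bytes). A hedged sketch of a decoder with that behavior (decode_node_body is illustrative, not DjangoNodeStorage's actual code):

import json
import pickle
import zlib

def decode_node_body(data):
    """Decompress a node body, preferring JSON and falling back to pickle."""
    raw = zlib.decompress(data)
    try:
        return json.loads(raw)
    except ValueError:  # covers UnicodeDecodeError and JSONDecodeError
        return pickle.loads(raw)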
Example #35
File: models.py Project: asavoy/sentry
 def get_prep_value(self, value):
     if value is None:
         return
     return base64.b64encode(pickle.dumps(value).encode('zlib'))
Example #36
 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     return compress(pickle.dumps(value))
Example #37
File: models.py Project: DouweM/sentry
 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     return base64.b64encode(pickle.dumps(value).encode('zlib'))
Example #38
File: tests.py Project: clvrobj/sentry
 def test_incr_does_buffer_extra_to_conn(self):
     model = mock.Mock()
     columns = {'times_seen': 1}
     filters = {'pk': 1}
     self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
     self.assertEquals(self.buf.conn.hget('extra', 'foo'), pickle.dumps('bar'))
Example #39
File: codecs.py Project: 280185386/sentry
 def encode(self, value):
     return zlib.compress(pickle.dumps(value))
Example #40
 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     return compress(pickle.dumps(value))