Ejemplo n.º 1
0
    def process(self, key):
        """Flush the buffered Redis hash stored at ``key``.

        Decodes the hash into (model, counter increments, filters, extras)
        and delegates the database update to the base ``process``.
        """
        routing_client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # Short-TTL lock: duplicate celery tasks / etas may try to flush the
        # same key concurrently; only the first one proceeds.
        acquired = routing_client.set(lock_key, '1', nx=True, ex=10)
        if not acquired:
            metrics.incr('buffer.revoked', tags={'reason': 'locked'})
            self.logger.info('Skipped process on %s; unable to get lock', key)
            return

        # Read the hash and clear it (plus its pending-set entry) in one
        # pipeline round-trip.
        local = self.cluster.get_local_client_for_key(key)
        pipeline = local.pipeline()
        pipeline.hgetall(key)
        pipeline.zrem(self.pending_key, key)
        pipeline.delete(key)
        values = pipeline.execute()[0]

        if not values:
            metrics.incr('buffer.revoked', tags={'reason': 'empty'})
            self.logger.info('Skipped process on %s; no values found', key)
            return

        # 'm' is the dotted model path; 'f' is the pickled filter dict.
        model = import_string(values['m'])
        filters = pickle.loads(values['f'])
        incr_values = {}
        extra_values = {}
        # Remaining fields are prefixed: 'i+<col>' integer increments,
        # 'e+<col>' pickled extra values.
        for field, payload in values.iteritems():
            prefix, column = field[:2], field[2:]
            if prefix == 'i+':
                incr_values[column] = int(payload)
            elif prefix == 'e+':
                extra_values[column] = pickle.loads(payload)

        super(RedisBuffer, self).process(model, incr_values, filters, extra_values)
Ejemplo n.º 2
0
    def process(self, key):
        """Flush the buffered counters/extras stored in the hash ``key``.

        Reads the hash, removes it from the pending sorted set, deletes it,
        decodes the payload, and hands it to the base ``process``.
        """
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, '1', nx=True, ex=10):
            metrics.incr('buffer.revoked', tags={'reason': 'locked'})
            self.logger.info('Skipped process on %s; unable to get lock', key)
            return
        # NOTE(review): the lock is never explicitly released here; it relies
        # on the 10s TTL to expire.

        # Read and clear the hash (plus its pending-set entry) in a single
        # pipeline round-trip so another worker cannot observe it again.
        conn = self.cluster.get_local_client_for_key(key)
        pipe = conn.pipeline()
        pipe.hgetall(key)
        pipe.zrem(self.pending_key, key)
        pipe.delete(key)
        values = pipe.execute()[0]

        if not values:
            metrics.incr('buffer.revoked', tags={'reason': 'empty'})
            self.logger.info('Skipped process on %s; no values found', key)
            return

        # 'm' holds the dotted path of the model; 'f' the pickled filters.
        model = import_string(values['m'])
        filters = pickle.loads(values['f'])
        incr_values = {}
        extra_values = {}
        # Remaining fields are prefixed: 'i+<col>' = counter increments,
        # 'e+<col>' = pickled extra values.
        for k, v in values.iteritems():
            if k.startswith('i+'):
                incr_values[k[2:]] = int(v)
            elif k.startswith('e+'):
                extra_values[k[2:]] = pickle.loads(v)

        super(RedisBuffer, self).process(model, incr_values, filters,
                                         extra_values)
Ejemplo n.º 3
0
    def process(self, key):
        """Flush the buffered hash at ``key`` into the base ``process``."""
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not self.conn.setnx(lock_key, '1'):
            return
        # NOTE(review): SETNX followed by EXPIRE is not atomic -- a crash
        # between the two calls would leave the lock without a TTL; confirm
        # this is acceptable for this code path.
        self.conn.expire(lock_key, 10)

        # Batched read-and-delete of the hash.
        with self.conn.map() as conn:
            values = conn.hgetall(key)
            conn.delete(key)

        if not values:
            return

        # 'm' = dotted model path, 'f' = pickled filter dict.
        model = import_string(values['m'])
        filters = pickle.loads(values['f'])
        incr_values = {}
        extra_values = {}
        # 'i+<col>' entries are integer increments, 'e+<col>' pickled extras.
        for k, v in values.iteritems():
            if k.startswith('i+'):
                incr_values[k[2:]] = int(v)
            elif k.startswith('e+'):
                extra_values[k[2:]] = pickle.loads(v)

        super(RedisBuffer, self).process(model, incr_values, filters,
                                         extra_values)
Ejemplo n.º 4
0
    def process(self, key):
        """Drain the buffered hash stored at ``key`` and apply it."""
        # Short-TTL lock guards against duplicate celery tasks / etas
        # flushing the same key concurrently.
        lock_key = self._make_lock_key(key)
        if not self.conn.setnx(lock_key, '1'):
            return
        self.conn.expire(lock_key, 10)

        # Read the whole hash and remove it in one batched round-trip.
        with self.conn.map() as conn:
            values = conn.hgetall(key)
            conn.delete(key)

        if not values:
            return

        # 'm' names the model (dotted path); 'f' carries pickled filters.
        model = import_string(values['m'])
        filters = pickle.loads(values['f'])

        # Split the remaining prefixed fields into counter increments
        # ('i+<col>') and pickled extra values ('e+<col>').
        incr_values = {}
        extra_values = {}
        for field, payload in values.iteritems():
            if field.startswith('i+'):
                incr_values[field[2:]] = int(payload)
            elif field.startswith('e+'):
                extra_values[field[2:]] = pickle.loads(payload)

        super(RedisBuffer, self).process(model, incr_values, filters, extra_values)
Ejemplo n.º 5
0
    def process(self, key):
        """Flush the buffered hash at ``key`` via the cluster map client."""
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, '1', nx=True, ex=10):
            return

        # map() batches the commands; results appear to be deferred objects
        # whose payload is read via ``.value`` after the block exits.
        with self.cluster.map() as conn:
            values = conn.hgetall(key)
            conn.delete(key)

        if not values.value:
            return

        # 'm' = dotted model path, 'f' = pickled filter dict.
        model = import_string(values.value['m'])
        filters = pickle.loads(values.value['f'])
        incr_values = {}
        extra_values = {}
        # 'i+<col>' entries are integer increments, 'e+<col>' pickled extras.
        for k, v in values.value.iteritems():
            if k.startswith('i+'):
                incr_values[k[2:]] = int(v)
            elif k.startswith('e+'):
                extra_values[k[2:]] = pickle.loads(v)

        super(RedisBuffer, self).process(model, incr_values, filters, extra_values)
Ejemplo n.º 6
0
    def _process_single_incr(self, key):
        """Flush one buffered increment hash ``key`` to the database.

        Acquires a short-TTL lock, atomically reads and clears the hash and
        its pending-set entry, decodes the payload (JSON with a legacy
        pickle fallback), then delegates to the base ``process``.  The lock
        is always released in the ``finally``.
        """
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, "1", nx=True, ex=10):
            metrics.incr("buffer.revoked", tags={"reason": "locked"}, skip_internal=False)
            self.logger.debug("buffer.revoked.locked", extra={"redis_key": key})
            return

        pending_key = self._make_pending_key_from_key(key)

        try:
            # Read and clear the hash in a single pipeline so a concurrent
            # flush cannot see it again.
            conn = self.cluster.get_local_client_for_key(key)
            pipe = conn.pipeline()
            pipe.hgetall(key)
            pipe.zrem(pending_key, key)
            pipe.delete(key)
            values = pipe.execute()[0]

            # XXX(python3): In python2 this isn't as important since redis will
            # return string types (be it, byte strings), but in py3 we get bytes
            # back, and really we just want to deal with keys as strings.
            values = {force_text(k): v for k, v in six.iteritems(values)}

            if not values:
                metrics.incr("buffer.revoked", tags={"reason": "empty"}, skip_internal=False)
                self.logger.debug("buffer.revoked.empty", extra={"redis_key": key})
                return

            # XXX(py3): Note that ``import_string`` explicitly wants a str in
            # python2, so we'll decode (for python3) and then translate back to
            # a byte string (in python2) for import_string.
            model = import_string(str(values.pop("m").decode("utf-8")))  # NOQA

            # 'f' is the filter dict: JSON when it starts with '{',
            # otherwise legacy pickle.
            if values["f"].startswith(b"{"):
                filters = self._load_values(json.loads(values.pop("f").decode("utf-8")))
            else:
                # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                filters = pickle.loads(values.pop("f"))

            # Remaining fields: 'i+<col>' counter deltas, 'e+<col>' extras
            # (JSON if '[', else legacy pickle), 's' signal-only flag.
            incr_values = {}
            extra_values = {}
            signal_only = None
            for k, v in six.iteritems(values):
                if k.startswith("i+"):
                    incr_values[k[2:]] = int(v)
                elif k.startswith("e+"):
                    if v.startswith(b"["):
                        extra_values[k[2:]] = self._load_value(json.loads(v.decode("utf-8")))
                    else:
                        # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                        extra_values[k[2:]] = pickle.loads(v)
                elif k == "s":
                    signal_only = bool(int(v))  # Should be 1 if set

            super(RedisBuffer, self).process(model, incr_values, filters, extra_values, signal_only)
        finally:
            client.delete(lock_key)
Ejemplo n.º 7
0
    def _process_single_incr(self, key):
        """Flush one buffered increment hash ``key`` to the database.

        Takes a short-TTL lock, atomically drains the hash and its
        pending-set entry, decodes the payload (JSON with a legacy pickle
        fallback) and delegates to the base ``process``.  The lock is
        released in the ``finally``.
        """
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, "1", nx=True, ex=10):
            metrics.incr("buffer.revoked",
                         tags={"reason": "locked"},
                         skip_internal=False)
            self.logger.debug("buffer.revoked.locked",
                              extra={"redis_key": key})
            return

        pending_key = self._make_pending_key_from_key(key)

        try:
            # Read the hash and clear it (plus its pending-set entry) in a
            # single pipeline so another worker cannot see it again.
            conn = self.cluster.get_local_client_for_key(key)
            pipe = conn.pipeline()
            pipe.hgetall(key)
            pipe.zrem(pending_key, key)
            pipe.delete(key)
            values = pipe.execute()[0]

            if not values:
                metrics.incr("buffer.revoked",
                             tags={"reason": "empty"},
                             skip_internal=False)
                self.logger.debug("buffer.revoked.empty",
                                  extra={"redis_key": key})
                return

            # 'm' is the dotted model path; 'f' the filter dict -- JSON when
            # it starts with '{', otherwise legacy pickle.
            model = import_string(values.pop("m"))
            if values["f"].startswith("{"):
                filters = self._load_values(json.loads(values.pop("f")))
            else:
                # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                filters = pickle.loads(values.pop("f"))

            # Remaining fields: 'i+<col>' counter deltas, 'e+<col>' extras
            # (JSON if '[', else legacy pickle), 's' signal-only flag.
            incr_values = {}
            extra_values = {}
            signal_only = None
            for k, v in six.iteritems(values):
                if k.startswith("i+"):
                    incr_values[k[2:]] = int(v)
                elif k.startswith("e+"):
                    if v.startswith("["):
                        extra_values[k[2:]] = self._load_value(json.loads(v))
                    else:
                        # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                        extra_values[k[2:]] = pickle.loads(v)
                elif k == "s":
                    signal_only = bool(int(v))  # Should be 1 if set

            super(RedisBuffer, self).process(model, incr_values, filters,
                                             extra_values, signal_only)
        finally:
            client.delete(lock_key)
Ejemplo n.º 8
0
    def _process_single_incr(self, key):
        """Flush one buffered increment hash ``key`` to the database.

        Takes a short-TTL lock, atomically drains the hash and its
        pending-set entry, decodes the payload (JSON with a legacy pickle
        fallback) and delegates to the base ``process``.  The lock is
        released in the ``finally``.
        """
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, '1', nx=True, ex=10):
            metrics.incr('buffer.revoked',
                         tags={'reason': 'locked'},
                         skip_internal=False)
            self.logger.debug('buffer.revoked.locked',
                              extra={'redis_key': key})
            return

        pending_key = self._make_pending_key_from_key(key)

        try:
            # Read the hash and clear it (plus its pending-set entry) in a
            # single pipeline so another worker cannot see it again.
            conn = self.cluster.get_local_client_for_key(key)
            pipe = conn.pipeline()
            pipe.hgetall(key)
            pipe.zrem(pending_key, key)
            pipe.delete(key)
            values = pipe.execute()[0]

            if not values:
                metrics.incr('buffer.revoked',
                             tags={'reason': 'empty'},
                             skip_internal=False)
                self.logger.debug('buffer.revoked.empty',
                                  extra={'redis_key': key})
                return

            # 'm' is the dotted model path; 'f' the filter dict -- JSON when
            # it starts with '{', otherwise legacy pickle.
            model = import_string(values.pop('m'))
            if values['f'].startswith('{'):
                filters = self._load_values(json.loads(values.pop('f')))
            else:
                # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                filters = pickle.loads(values.pop('f'))

            # Remaining fields: 'i+<col>' counter deltas, 'e+<col>' extras
            # (JSON if '[', else legacy pickle).
            incr_values = {}
            extra_values = {}
            for k, v in six.iteritems(values):
                if k.startswith('i+'):
                    incr_values[k[2:]] = int(v)
                elif k.startswith('e+'):
                    if v.startswith('['):
                        extra_values[k[2:]] = self._load_value(json.loads(v))
                    else:
                        # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                        extra_values[k[2:]] = pickle.loads(v)

            super(RedisBuffer, self).process(model, incr_values, filters,
                                             extra_values)
        finally:
            client.delete(lock_key)
Ejemplo n.º 9
0
 def to_python(self, value):
     """Decode a base64'd, zlib-compressed, pickled database value.

     Returns {} when decoding fails.  Python 2 only: uses the
     ``except Exception, e`` syntax and the str 'zlib' codec.
     """
     if isinstance(value, basestring) and value:
         try:
             value = pickle.loads(base64.b64decode(value).decode('zlib'))
         except Exception, e:
             logger.exception(e)
             return {}
     # NOTE(review): this scraped snippet appears truncated -- the success
     # path (returning the decoded value) is not visible here.
Ejemplo n.º 10
0
 def to_python(self, value):
     """Decode a compressed, pickled database value via ``decompress``.

     Returns {} when decoding fails.  Python 2 only
     (``except Exception, e`` syntax).
     """
     if isinstance(value, basestring) and value:
         try:
             value = pickle.loads(decompress(value))
         except Exception, e:
             logger.exception(e)
             return {}
     # NOTE(review): this scraped snippet appears truncated -- the success
     # path (returning the decoded value) is not visible here.
Ejemplo n.º 11
0
 def to_python(self, value):
     """Decode a compressed, pickled database value via ``decompress``.

     Returns {} when decoding fails.  Python 2 only
     (``except Exception, e`` syntax).
     """
     if isinstance(value, basestring) and value:
         try:
             value = pickle.loads(decompress(value))
         except Exception, e:
             logger.exception(e)
             return {}
     # NOTE(review): this scraped snippet appears truncated -- the success
     # path (returning the decoded value) is not visible here.
Ejemplo n.º 12
0
    def process(self, model, columns, filters, extra=None):
        """Drain buffered counters/extras for ``model`` from Tarantool.

        ``columns`` maps column name -> increment amount; the amounts are
        unused here because the buffered totals are read back from the
        store.  Zero/empty totals are dropped before delegating to the base
        ``process``.
        """
        # Short-lived lock (TTL ``self.delay``) to avoid concurrent flushes.
        lock_key = self._make_lock_key(model, filters)
        call_args = (lock_key, '1', self.delay)
        if not self._tnt.call('box.sentry_buffer.setnx', call_args):
            return

        # Read-and-reset each counter to '0'; an empty response means the
        # key did not exist and is skipped.
        results = {}
        for column, amount in columns.iteritems():
            key = self._make_key(model, filters, column)
            call_args = (key, '0', str(self.key_expire))
            response = self._tnt.call('box.sentry_buffer.getset', call_args)
            if len(response) == 0:
                continue
            value = int(response[0][0])
            results[column] = value

        # Extras live in a separate hash, read and deleted in one call.
        hash_key = self._make_extra_key(model, filters)
        call_args = (hash_key,)
        extra_results = self._tnt.call('box.sentry_buffer.hgetalldelete',
                                       call_args)

        if len(extra_results):
            if not extra:
                extra = {}
            for key, value in extra_results:
                if not value:
                    continue
                extra[key] = pickle.loads(str(value))

        # Filter out empty or zero'd results to avoid a potentially unnecessary update
        results = dict((k, int(v)) for k, v in results.iteritems() if int(v or 0) > 0)
        if not results:
            return
        super(TarantoolBuffer, self).process(model, results, filters, extra)
Ejemplo n.º 13
0
Archivo: node.py Proyecto: webZW/sentry
    def to_python(self, value):
        """Convert a raw database value into a ``NodeData``.

        ``value`` may be a compressed+pickled string (loaded from the DB),
        an already-decoded dict, or empty.  Any ``node_id`` key is split out
        of the body and passed to NodeData separately.
        """
        node_id = None
        # If value is a string, we assume this is a value we've loaded from the
        # database, it should be decompressed/unpickled, and we should end up
        # with a dict.
        if value and isinstance(value, six.string_types):
            try:
                value = pickle.loads(decompress(value))
            except Exception as e:
                # TODO this is a bit dangerous as a failure to read/decode the
                # node_id will end up with this record being replaced with an
                # empty value under a new key, potentially orphaning an
                # original value in nodestore. OTOH if we can't decode the info
                # here, the node was already effectively orphaned.
                logger.exception(e)
                value = None

        if value:
            if 'node_id' in value:
                node_id = value.pop('node_id')
                # If the value is now empty, that means that it only had the
                # node_id in it, which means that we should be looking to *load*
                # the event body from nodestore. If it does have other stuff in
                # it, that means we got an event body with a precomputed id in
                # it, and we want to *save* the rest of the body to nodestore.
                if value == {}:
                    value = None
        else:
            # Either we were passed a null/empty value in the constructor, or
            # we failed to decode the value from the database so we have no id
            # to load data from, and no data to save.
            value = None

        return NodeData(self, node_id, value, wrapper=self.wrapper)
Ejemplo n.º 14
0
    def process(self, model, columns, filters, extra=None):
        """Flush buffered counter/extra values for ``model`` to the DB.

        Counters are drained with GETSET(key, 0); extras are read from a
        separate hash and merged over the caller-supplied ``extra``.
        Zero/empty totals are dropped before delegating to the base
        ``process``.
        """
        results = {}
        with self.conn.map() as conn:
            for column, amount in columns.iteritems():
                key = self._make_key(model, filters, column)
                results[column] = conn.getset(key, 0)
                conn.expire(key, 60)  # drop expiration as it was just emptied

            # Extras hash is read and deleted in the same batched block.
            hash_key = self._make_extra_key(model, filters)
            extra_results = conn.hgetall(hash_key)
            conn.delete(hash_key)

        # We combine the stored extra values with whatever was passed.
        # This ensures that static values get updated to their latest value,
        # and dynamic values (usually query expressions) are still dynamic.
        if extra_results:
            if not extra:
                extra = {}
            for key, value in extra_results.iteritems():
                if not value:
                    continue
                extra[key] = pickle.loads(str(value))

        # Filter out empty or zero'd results to avoid a potentially unnecessary update
        results = dict(
            (k, int(v)) for k, v in results.iteritems() if int(v or 0) > 0)
        if not results:
            return
        super(RedisBuffer, self).process(model, results, filters, extra)
Ejemplo n.º 15
0
 def to_python(self, value):
     """Decode a base64'd, zlib-compressed, pickled database value.

     Returns {} when decoding fails.  Python 2 only: uses the
     ``except Exception, e`` syntax and the str 'zlib' codec.
     """
     if isinstance(value, basestring) and value:
         try:
             value = pickle.loads(base64.b64decode(value).decode('zlib'))
         except Exception, e:
             logger.exception(e)
             return {}
     # NOTE(review): this scraped snippet appears truncated -- the success
     # path (returning the decoded value) is not visible here.
Ejemplo n.º 16
0
    def process(self, model, columns, filters, extra=None):
        """Flush buffered counter/extra values for ``model`` to the DB.

        Counters are drained with GETSET(key, 0); extras are read from a
        separate hash and merged over the caller-supplied ``extra``.
        Zero/empty totals are dropped before delegating to the base
        ``process``.
        """
        results = {}
        with self.conn.map() as conn:
            for column, amount in columns.iteritems():
                key = self._make_key(model, filters, column)
                results[column] = conn.getset(key, 0)
                conn.expire(key, 60)  # drop expiration as it was just emptied

            # Extras hash is read and deleted in the same batched block.
            hash_key = self._make_extra_key(model, filters)
            extra_results = conn.hgetall(hash_key)
            conn.delete(hash_key)

        # We combine the stored extra values with whatever was passed.
        # This ensures that static values get updated to their latest value,
        # and dynamic values (usually query expressions) are still dynamic.
        if extra_results:
            if not extra:
                extra = {}
            for key, value in extra_results.iteritems():
                if not value:
                    continue
                extra[key] = pickle.loads(str(value))

        # Filter out empty or zero'd results to avoid a potentially unnecessary update
        results = dict((k, int(v)) for k, v in results.iteritems() if int(v or 0) > 0)
        if not results:
            return
        super(RedisBuffer, self).process(model, results, filters, extra)
Ejemplo n.º 17
0
    def _process_single_incr(self, key):
        """Flush one buffered increment hash ``key`` to the database.

        Takes a short-TTL lock, atomically drains the hash and its
        pending-set entry, decodes the payload (JSON with a legacy pickle
        fallback) and delegates to the base ``process``.  The lock is
        released in the ``finally``.
        """
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, '1', nx=True, ex=10):
            metrics.incr('buffer.revoked', tags={'reason': 'locked'})
            self.logger.debug('buffer.revoked.locked', extra={'redis_key': key})
            return

        pending_key = self._make_pending_key_from_key(key)

        try:
            # Read the hash and clear it (plus its pending-set entry) in a
            # single pipeline so another worker cannot see it again.
            conn = self.cluster.get_local_client_for_key(key)
            pipe = conn.pipeline()
            pipe.hgetall(key)
            pipe.zrem(pending_key, key)
            pipe.delete(key)
            values = pipe.execute()[0]

            if not values:
                metrics.incr('buffer.revoked', tags={'reason': 'empty'})
                self.logger.debug('buffer.revoked.empty', extra={'redis_key': key})
                return

            # 'm' is the dotted model path; 'f' the filter dict -- JSON when
            # it starts with '{', otherwise legacy pickle.
            model = import_string(values.pop('m'))
            if values['f'].startswith('{'):
                filters = self._load_values(json.loads(values.pop('f')))
            else:
                # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                filters = pickle.loads(values.pop('f'))

            # Remaining fields: 'i+<col>' counter deltas, 'e+<col>' extras
            # (JSON if '[', else legacy pickle).
            incr_values = {}
            extra_values = {}
            for k, v in six.iteritems(values):
                if k.startswith('i+'):
                    incr_values[k[2:]] = int(v)
                elif k.startswith('e+'):
                    if v.startswith('['):
                        extra_values[k[2:]] = self._load_value(json.loads(v))
                    else:
                        # TODO(dcramer): legacy pickle support - remove in Sentry 9.1
                        extra_values[k[2:]] = pickle.loads(v)

            super(RedisBuffer, self).process(model, incr_values, filters, extra_values)
        finally:
            client.delete(lock_key)
Ejemplo n.º 18
0
    def test_handles_gettext_lazy(self):
        """A lazy gettext proxy must survive transform() + pickle round-trip."""
        def fake_gettext(to_translate):
            return u'Igpay Atinlay'

        fake_gettext_lazy = lazy(fake_gettext, str)

        # assertEquals is a deprecated alias (removed in modern unittest);
        # use the canonical assertEqual.
        self.assertEqual(
            pickle.loads(pickle.dumps(
                transform(fake_gettext_lazy("something")))),
            u'Igpay Atinlay')
Ejemplo n.º 19
0
 def to_python(self, value):
     """Deserialize a compressed, pickled database string into a dict.

     Falsy input and undecodable input both yield {}.  Non-string truthy
     values are returned unchanged.
     """
     if isinstance(value, six.string_types) and value:
         try:
             value = pickle.loads(decompress(value))
         except Exception as e:
             logger.exception(e)
             return {}
     elif not value:
         return {}
     return value
Ejemplo n.º 20
0
 def to_python(self, value):
     """Deserialize a compressed, pickled database value.

     Returns {} for falsy or undecodable input; truthy non-string values
     pass through unchanged.
     """
     # Anything falsy (None, '', {}) normalizes to an empty dict.
     if not value:
         return {}
     # Truthy non-strings are returned as-is.
     if not isinstance(value, six.string_types):
         return value
     try:
         return pickle.loads(decompress(value))
     except Exception as e:
         logger.exception(e)
         return {}
Ejemplo n.º 21
0
    def test_pickling_compat(self):
        """Pickled events must load on both old and new workers.

        The pickle payload must not reference the ``canonical`` module, and
        unpickling must work with and without the ``NodeData.__getstate__``
        backwards-compat shim (old workers lack it).
        """
        event = self.store_event(
            data={
                "message": "Hello World!",
                "tags": {
                    "logger": "foobar",
                    "site": "foo",
                    "server_name": "bar"
                },
            },
            project_id=self.project.id,
        )

        # Ensure we load and memoize the interfaces as well.
        assert len(event.interfaces) > 0

        # When we pickle an event we need to make sure our canonical code
        # does not appear here or it breaks old workers.
        data = pickle.dumps(event, protocol=2)
        assert b"canonical" not in data

        # For testing we remove the backwards compat support in the
        # `NodeData` as well.
        nodedata_getstate = NodeData.__getstate__
        del NodeData.__getstate__

        # Old worker loading
        try:
            event2 = pickle.loads(data)
            assert event2.data == event.data
        finally:
            NodeData.__getstate__ = nodedata_getstate

        # New worker loading
        event2 = pickle.loads(data)
        assert event2.data == event.data
Ejemplo n.º 22
0
    def to_python(self, value):
        """Decode a database value and wrap it in ``NodeData``.

        String input is decompressed and unpickled (falling back to {} on
        failure); falsy input becomes {}.
        """
        if isinstance(value, six.string_types) and value:
            try:
                value = pickle.loads(decompress(value))
            except Exception as e:
                logger.exception(e)
                value = {}
        elif not value:
            value = {}

        # A payload containing 'node_id' means the body lives externally;
        # no inline data is kept in that case.
        if 'node_id' in value:
            node_id = value.pop('node_id')
            data = None
        else:
            node_id = None
            data = value

        return NodeData(self, node_id, data)
Ejemplo n.º 23
0
    def to_python(self, value):
        """Decode a stored payload and wrap it in ``NodeData``.

        String input is decompressed and unpickled, falling back to {} on
        failure; falsy input becomes {}.  A payload carrying 'node_id'
        yields a NodeData with no inline data.
        """
        if not value:
            value = {}
        elif isinstance(value, six.string_types):
            try:
                value = pickle.loads(decompress(value))
            except Exception as e:
                logger.exception(e)
                value = {}

        if 'node_id' in value:
            # The body lives externally; keep only the id.
            return NodeData(self, value.pop('node_id'), None)
        return NodeData(self, None, value)
Ejemplo n.º 24
0
    def to_python(self, value):
        """Decode a database value into ``NodeData(node_id, body)``.

        A string value is decompressed and unpickled; a decode failure
        leaves nothing to load or save.  A body holding only 'node_id'
        means the event data must be *loaded* from nodestore; extra keys
        alongside the id mean the remaining body should be *saved*.
        """
        # Strings come from the database: decompress + unpickle into a dict.
        if isinstance(value, six.string_types) and value:
            try:
                value = pickle.loads(decompress(value))
            except Exception as e:
                # TODO this is a bit dangerous as a failure to read/decode the
                # node_id will end up with this record being replaced with an
                # empty value under a new key, potentially orphaning an
                # original value in nodestore. OTOH if we can't decode the info
                # here, the node was already effectively orphaned.
                logger.exception(e)
                value = None

        node_id = None
        if not value:
            # Null/empty input or failed decode: no id, no data.
            value = None
        elif 'node_id' in value:
            node_id = value.pop('node_id')
            if value == {}:
                # Only the pointer was stored; the body lives in nodestore.
                value = None

        if value is not None and self.wrapper is not None:
            value = self.wrapper(value)

        return NodeData(self, node_id, value)
Ejemplo n.º 25
0
    def test_incr_saves_to_redis(self):
        """incr() must write the serialized hash and the pending zset entry.

        Layout checked: 'm' = dotted model path, 'f' = pickled filters,
        'i+<col>' = counter bytes, 'e+<col>' = pickled extras.  A second
        incr bumps the counter and overwrites the extras.
        """
        now = datetime(2017, 5, 3, 6, 6, 6, tzinfo=timezone.utc)
        client = self.buf.cluster.get_routing_client()
        model = mock.Mock()
        model.__name__ = "Mock"
        columns = {"times_seen": 1}
        filters = {"pk": 1, "datetime": now}
        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          "foo": "bar",
                          "datetime": now
                      })
        result = client.hgetall("foo")
        # Force keys to strings
        result = {force_text(k): v for k, v in six.iteritems(result)}

        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "bar"
        assert result == {"i+times_seen": b"1", "m": b"mock.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == [b"foo"]
        self.buf.incr(model,
                      columns,
                      filters,
                      extra={
                          "foo": "baz",
                          "datetime": now
                      })
        result = client.hgetall("foo")
        # Force keys to strings
        result = {force_text(k): v for k, v in six.iteritems(result)}
        f = result.pop("f")
        assert pickle.loads(f) == {"pk": 1, "datetime": now}
        assert pickle.loads(result.pop("e+datetime")) == now
        assert pickle.loads(result.pop("e+foo")) == "baz"
        assert result == {"i+times_seen": b"2", "m": b"mock.mock.Mock"}

        pending = client.zrange("b:p", 0, -1)
        assert pending == [b"foo"]
Ejemplo n.º 26
0
 def decode(self, value):
     """Unpickle a zlib-compressed payload and return the original object."""
     raw = zlib.decompress(value)
     return pickle.loads(raw)
Ejemplo n.º 27
0
def test_pickle():
    """Round-trip each tagstore model through pickle and verify equality.

    The original version evaluated the comparison as a bare expression and
    discarded the result, so the test could never fail; assert it instead.
    """
    for cls in [TagKey, TagValue, GroupTagKey, GroupTagValue]:
        value = cls(**{name: 1 for name in cls.__slots__})
        assert pickle.loads(pickle.dumps(value)) == value
Ejemplo n.º 28
0
import warnings

from django.utils.text import slugify
from exam import fixture
from uuid import uuid4

from sentry.models import (
    Activity, Event, Group, Organization, OrganizationMember,
    OrganizationMemberTeam, Project, Team, User
)
from sentry.utils.compat import pickle
from sentry.utils.strings import decompress


# an example data blog from Sentry 5.4.1 (db level)
LEGACY_DATA = pickle.loads(decompress("""eJy9WW1v20YS/q5fwfqLpECluMvXFSzjgKK9BrikByR3XwyDXpFLmjVFsnxxbAT57zczS0rUS+LGrU8IYu3s2+yzM8/MrGZxxSYfpo0q2vrJzIpW1YmMVGO+U00jUzWdVHwyiysbBm13IgdaH++yxoB/0mhV0xp9p5GqQtWyVbHRNVmRGre3tXxQBQ26vYW57qT5MK1kLbcNtLzJLK/8SQOyVqYoCVAicJB6bGsJEmahBoz0fGpMWacPKOU4kKFiy/80qm6WcQSLqnppPmR128lcFQ/NUp9sucmKJSmCM52JhO1AIWy42Lhr26pZLZdqE9luYtuKucyxWCJiJSPXEcIPNrFkbJXYjmUnAVOMKyfijnB47FpuYgXehkcy/oesKjNVbQ9oVG6XDHfxJhJOlJcylg8pCnzSPpj8YpnC9yzf4SzwQRdoB4FtW5YfMN63bVsEjo29sEYHZ8UFBBy8PzFekkUYbsu4yxXCyBmCxjmMGs7NESvbZCazseXQjNOb/xWwwH6XFvBgTlSW95le1SdhgNfT1TlKUA+ED9F7lNsqV3hq6LEtHHWnZAyXg23SyOZ0tQVeoW2TxEHJH52qn8KmrcFosMuFZafYEcsWjcD2aKyPoq1q78oYhQGM+ufPH/Gr+MpxPrQyugdDishwyZQcNKUEoUO9HDIkh3Rx0LKTrojarETIHFRj02V5HG4b1MvxUAG5acJKtnco8P+cAebZZlk9gd4FN/1lk7XqxwoUA5dptGEuN7JRZvWEaxK+Va3CqISDPKKdOgK1dC2CBSzWGH0QIrOr4I+afUYXYzDiwjj6fBublfH5AmbyczNpdo/XCjy8hXuCiWFWJOVMyxc42T5WbPzJs6YNt/IxBFjS9m7dqDwxj4QLVN4hM3+QZDQuWaGLVlh1mzyLwnuFELn+5D3aEQDXhu1ThZfrBoOxmyQfk5hLjBJ1eVVnCKdn7cY2UZ1VMLjuioJ8yWOTPR15fLRRhkbnoRu5Ikg2TNierXzHVVGwUZ7nKm8jg2DDNhzHkV3ffwK+ooXoJJ53QKQeWM/FC6kUEPfIUHJQDl3RQ1fkFnzzNRvcT5+hdh9Ommp69fkkZWjL1weEtDAO+IiaAx3d4Ao2riDwFAMZgV7+wC15gmPQiS412GTkP+UZKGWUm99V1BqyNaxHZjm28BNmXeEEcrI226qwqWAkivR9o4ljC28av+MYc/gy4xazFwZfGMyBP9bC8BaGDRLHF47P5jiRzOBOFnFOVx1Ye9UObeZIOztRG19rF5B51KrpctQsoPgY2JMUuPbi8+5yV8YL73VhDOFxZVzffAE4Aw0nUCbu5E7Sv2g2gXcQgwO6drzNIKCNdtQYoEVd9guW9YAJkFfdU4AeOkIpsVxCSVgj8hZE/QKDUV6mKUEvbDyDhp5iMSgm4KApBB7EEcMLYHgmtABAfQSAfmR/xEi4OPW1bkAAYilyxsV50sAhOoshWPB4weStxUZBGWViRzroB5TaEExJBvwHQJKEDYNGEYFZFDarEuhyHxMAcMoiLIxax3z7ZUEj3GNO/jInuYfy6Zjts+SZEGFkBYWa1QUu4B8vDPOJ07MiyrtYUYBsVrRZQJSeFSFkRyQQAA6dvD9MmGcFnZ5ZZ44yfHR2cBJETsR0QkZuiusWJbX55C1Hq5SUTIK/UnCPZNV2td4bre814jljaJw6gjPmHYdwAK4o2x68JgRL2OQqns0JO3aCc61AYcpjIX2UR2vh/RhrvdYub5ntw+SCRtD/8H1PsWQswOOySXXIZZBRpt+KqIzvgwfjL4sejJ8NH4xy0/S74wYmzOCmGLFTChip15/F+8ucySD1hfV2IZZhEgzbBLiN5jcGuXB6jtYYpsIv5DVms9ckNob5+DPMxiBPh6PuGC09w2OYxKdf4S7bpT7NVfaJ+WsfVkU8e/MGjZO81/ZP+EnbvTHDMdf7hOxGm/T1NLp
T0X3Tbac3c1J6cA7cu+eb9Dy/UKG5MIi6wSkg8VvjfwvjzRudvmmVBC0ANOJAjqppBOqJAxoZuYfDXotNHL5nE8cenefi4oL6nTG8P9UKDAIspTAIMyOpyy0YRm8yt7cmzXFP8L66ujIi8jjz8HSz6bunfq3fOzC+O2B1sLv4hykB73jj7Qed/BG1QH1D7vjiNwTm4F18Pz+4aAM9J0CRhOyFfjWU5eAUf56+wJeoFAdnHKiLHMrlmoM+TN+XOqa5SHJAEXorSn9g0ogiFucCL5XhUJV9F2GcXendjjb+fgqB5lBU7c50xCAaFeQHgeHkY91pVNxDPoUarznPLa7/dW6BCLXnFleMuSVWidEb7s+PkaqwpJ8h2SzA4SMqXtd4RSM3p4gLZHhqvx573qewNWxETuXxr1HQMakRB/bKzs5H3MVwQ+v+70hvRNizB3pyvSHLgRJU09NWZpQxeO7fSkr9TS/1TfdX4nl7eiIvH85KdeoaPQDsynz7/pffKOvwgoNogCS8RiPRnWLcSdRcom0RP9M72sFtEZOvP1PHySPI4K/Vpxif6KpPXRbPyga/K/w6n19bN/iQwaAY3rOVjxQLNt+/u/mYbF+CEiQyf6Pr/jd1Q4IM6heRGnGPxS3NPT49fNZlSZm7j2HwcsDiX8QKJ8QVSE/0k+ndq6/nIzCa/hmE+fQC0D8xMF+jHlA432UfASHxym+ctBGnPD9uyNYCe/J/eFgN6JVFxylqf3dQwGp4yOCgFD6fwWFl/NIMLhCvmsEJ6/kMTuhKFF2H3o5Rm8v/yrzb1+5oq9HGwiBBVfvK0OSoH8J068sVLWYfJYEnL2hMHKeDZ5lCjBND4Y2oQhevYlf7zCkDE4f1DtRNfX4CXtcqM87iMJFZ3ldOQowJAEIUWMFU1XVZ/4CYgF9+i5iJMPaJgaaJvj2bL2gBNjAuPgkh4XIo0zXhXuqi/4qe5u3vIN3xDxXccnZUyi1cNttWZQ2l4hM9xusinmJPdZ+GtWrKroaIb/TDUN2Qlg2rMiP/4NY+sQb8whCfHcLQWK+NaRhimAjD6YpOt6Nl/NFFPWbtjOaPakRO2XQYYqHZAvfBVPzhATOd/vzGvhc6jRl9/zEr5mhInNGjRhji80c/9wU/53Dm6GX64NSv5NKDYY8UFt17nVB4oouvF6nVH10GSPar7Arg9Xr/ywmjV8Rz6HJ6Txx+QDi5gN07mXK4p4h+OGd6Y30RJOGEan8ZKLD1kLiMeoEDh+td8GCgu3O7A4S4t3c0zoeYPKeu4FtecHyA2REYmP6VRVPC/fUejiK973yGeQnnu7IJvsimMf8Hr5plBQ=="""))


class Fixtures(object):
    @fixture
    def projectkey(self):
        """Memoized key for the test's default project (created on first access)."""
        project = self.project
        return self.create_project_key(project=project)

    @fixture
    def user(self):
        """Memoized superuser account used as the default actor in tests."""
        email = 'admin@localhost'
        return self.create_user(email, is_superuser=True)

    @fixture
    def organization(self):
        # XXX(dcramer): ensure that your org slug doesnt match your team slug
        # and the same for your project slug
Ejemplo n.º 29
0
 def decode(self, value):
     """Inverse of encode: zlib-decompress *value*, then unpickle the result.

     NOTE(review): pickle.loads is unsafe on untrusted input — confirm that
     only internally-produced payloads reach this method.
     """
     raw = zlib.decompress(value)
     return pickle.loads(raw)
Ejemplo n.º 30
0
import six
import warnings

from django.utils.text import slugify
from exam import fixture
from uuid import uuid4

from sentry.models import (Activity, Event, Group, Organization,
                           OrganizationMember, OrganizationMemberTeam, Project,
                           Team, User)
from sentry.utils.compat import pickle
from sentry.utils.strings import decompress

# an example data blob from Sentry 5.4.1 (db level)
LEGACY_DATA = pickle.loads(
    decompress(
        """eJy9WW1v20YS/q5fwfqLpECluMvXFSzjgKK9BrikByR3XwyDXpFLmjVFsnxxbAT57zczS0rUS+LGrU8IYu3s2+yzM8/MrGZxxSYfpo0q2vrJzIpW1YmMVGO+U00jUzWdVHwyiysbBm13IgdaH++yxoB/0mhV0xp9p5GqQtWyVbHRNVmRGre3tXxQBQ26vYW57qT5MK1kLbcNtLzJLK/8SQOyVqYoCVAicJB6bGsJEmahBoz0fGpMWacPKOU4kKFiy/80qm6WcQSLqnppPmR128lcFQ/NUp9sucmKJSmCM52JhO1AIWy42Lhr26pZLZdqE9luYtuKucyxWCJiJSPXEcIPNrFkbJXYjmUnAVOMKyfijnB47FpuYgXehkcy/oesKjNVbQ9oVG6XDHfxJhJOlJcylg8pCnzSPpj8YpnC9yzf4SzwQRdoB4FtW5YfMN63bVsEjo29sEYHZ8UFBBy8PzFekkUYbsu4yxXCyBmCxjmMGs7NESvbZCazseXQjNOb/xWwwH6XFvBgTlSW95le1SdhgNfT1TlKUA+ED9F7lNsqV3hq6LEtHHWnZAyXg23SyOZ0tQVeoW2TxEHJH52qn8KmrcFosMuFZafYEcsWjcD2aKyPoq1q78oYhQGM+ufPH/Gr+MpxPrQyugdDishwyZQcNKUEoUO9HDIkh3Rx0LKTrojarETIHFRj02V5HG4b1MvxUAG5acJKtnco8P+cAebZZlk9gd4FN/1lk7XqxwoUA5dptGEuN7JRZvWEaxK+Va3CqISDPKKdOgK1dC2CBSzWGH0QIrOr4I+afUYXYzDiwjj6fBublfH5AmbyczNpdo/XCjy8hXuCiWFWJOVMyxc42T5WbPzJs6YNt/IxBFjS9m7dqDwxj4QLVN4hM3+QZDQuWaGLVlh1mzyLwnuFELn+5D3aEQDXhu1ThZfrBoOxmyQfk5hLjBJ1eVVnCKdn7cY2UZ1VMLjuioJ8yWOTPR15fLRRhkbnoRu5Ikg2TNierXzHVVGwUZ7nKm8jg2DDNhzHkV3ffwK+ooXoJJ53QKQeWM/FC6kUEPfIUHJQDl3RQ1fkFnzzNRvcT5+hdh9Ommp69fkkZWjL1weEtDAO+IiaAx3d4Ao2riDwFAMZgV7+wC15gmPQiS412GTkP+UZKGWUm99V1BqyNaxHZjm28BNmXeEEcrI226qwqWAkivR9o4ljC28av+MYc/gy4xazFwZfGMyBP9bC8BaGDRLHF47P5jiRzOBOFnFOVx1Ye9UObeZIOztRG19rF5B51KrpctQsoPgY2JMUuPbi8+5yV8YL73VhDOFxZVzffAE4Aw0nUCbu5E7Sv2g2gXcQgwO6drzNIKCNdtQYoEVd9guW9YAJkFfdU4AeOkIpsVxCSVgj8hZE/QKDUV6mKUEvbDyDhp5iMSgm4KApBB7EEcMLYHgmtABAfQSAfmR/xEi4OPW1bkAAYilyxsV50sAhOoshWPB4weStxUZBGWViRzroB5TaEExJBvwHQJKEDYNGEYFZFDarEuhyHxMAcMoiLIxax3z7ZUEj3GNO/jInuYfy6Zjts+SZEGFkBYWa1QUu4B8vDPOJ07MiyrtYUYBsVrRZQJSeFSFkRyQQAA6dvD9MmGcFnZ5ZZ44yfHR2cBJETsR0QkZuiusWJbX55C1Hq5SUTIK/UnCPZNV2td4bre814jljaJw6gjPmHYdwAK4o2x68JgRL2OQqns0JO3aCc61AYcpjIX2UR2vh/RhrvdYub5ntw+SCRtD/8H1PsWQswOOySXXIZZBRpt+KqIzvgwfjL4sejJ8NH4xy0/S74wYmzOCmGLFTChip15/F+8ucySD1hfV2IZZhEgzbBLiN5jcGuXB6jtYYpsIv5DVms9ckNob5+DPMxiBPh6PuGC09w2OYxKdf4S7bpT7NVfaJ+WsfVkU8e/MGjZO81/ZP+EnbvTHDMdf7hOxGm/T1NLpT0X3Tbac3c1J6cA7cu+eb9Dy/UKG5M
Ii6wSkg8VvjfwvjzRudvmmVBC0ANOJAjqppBOqJAxoZuYfDXotNHL5nE8cenefi4oL6nTG8P9UKDAIspTAIMyOpyy0YRm8yt7cmzXFP8L66ujIi8jjz8HSz6bunfq3fOzC+O2B1sLv4hykB73jj7Qed/BG1QH1D7vjiNwTm4F18Pz+4aAM9J0CRhOyFfjWU5eAUf56+wJeoFAdnHKiLHMrlmoM+TN+XOqa5SHJAEXorSn9g0ogiFucCL5XhUJV9F2GcXendjjb+fgqB5lBU7c50xCAaFeQHgeHkY91pVNxDPoUarznPLa7/dW6BCLXnFleMuSVWidEb7s+PkaqwpJ8h2SzA4SMqXtd4RSM3p4gLZHhqvx573qewNWxETuXxr1HQMakRB/bKzs5H3MVwQ+v+70hvRNizB3pyvSHLgRJU09NWZpQxeO7fSkr9TS/1TfdX4nl7eiIvH85KdeoaPQDsynz7/pffKOvwgoNogCS8RiPRnWLcSdRcom0RP9M72sFtEZOvP1PHySPI4K/Vpxif6KpPXRbPyga/K/w6n19bN/iQwaAY3rOVjxQLNt+/u/mYbF+CEiQyf6Pr/jd1Q4IM6heRGnGPxS3NPT49fNZlSZm7j2HwcsDiX8QKJ8QVSE/0k+ndq6/nIzCa/hmE+fQC0D8xMF+jHlA432UfASHxym+ctBGnPD9uyNYCe/J/eFgN6JVFxylqf3dQwGp4yOCgFD6fwWFl/NIMLhCvmsEJ6/kMTuhKFF2H3o5Rm8v/yrzb1+5oq9HGwiBBVfvK0OSoH8J068sVLWYfJYEnL2hMHKeDZ5lCjBND4Y2oQhevYlf7zCkDE4f1DtRNfX4CXtcqM87iMJFZ3ldOQowJAEIUWMFU1XVZ/4CYgF9+i5iJMPaJgaaJvj2bL2gBNjAuPgkh4XIo0zXhXuqi/4qe5u3vIN3xDxXccnZUyi1cNttWZQ2l4hM9xusinmJPdZ+GtWrKroaIb/TDUN2Qlg2rMiP/4NY+sQb8whCfHcLQWK+NaRhimAjD6YpOt6Nl/NFFPWbtjOaPakRO2XQYYqHZAvfBVPzhATOd/vzGvhc6jRl9/zEr5mhInNGjRhji80c/9wU/53Dm6GX64NSv5NKDYY8UFt17nVB4oouvF6nVH10GSPar7Arg9Xr/ywmjV8Rz6HJ6Txx+QDi5gN07mXK4p4h+OGd6Y30RJOGEan8ZKLD1kLiMeoEDh+td8GCgu3O7A4S4t3c0zoeYPKeu4FtecHyA2REYmP6VRVPC/fUejiK973yGeQnnu7IJvsimMf8Hr5plBQ=="""
    ))


class Fixtures(object):
    @fixture
    def projectkey(self):
        """Memoized key for the test's default project (created on first access)."""
        project = self.project
        return self.create_project_key(project=project)

    @fixture
    def user(self):
        """Memoized superuser account used as the default actor in tests."""
        email = 'admin@localhost'
        return self.create_user(email, is_superuser=True)

    @fixture
    def organization(self):
        # XXX(dcramer): ensure that your org slug doesnt match your team slug
Ejemplo n.º 31
0
    try:
        try:
            data = base64.b64decode(data).decode('zlib')
        except zlib.error:
            data = base64.b64decode(data)
    except Exception, e:
        # This error should be caught as it suggests that there's a
        # bug somewhere in the client's code.
        logger.exception('Bad data received')
        return HttpResponseForbidden('Bad data decoding request (%s, %s)' %
                                     (e.__class__.__name__, e))

    try:
        if format == 'pickle':
            data = pickle.loads(data)
        elif format == 'json':
            data = json.loads(data)
    except Exception, e:
        # This error should be caught as it suggests that there's a
        # bug somewhere in the client's code.
        logger.exception('Bad data received')
        return HttpResponseForbidden(
            'Bad data reconstructing object (%s, %s)' %
            (e.__class__.__name__, e))

    # XXX: ensure keys are coerced to strings
    data = dict((smart_str(k), v) for k, v in data.iteritems())

    if 'timestamp' in data:
        if is_float(data['timestamp']):
Ejemplo n.º 32
0
 def to_python(self, value):
     """Deserialize a stored field value.

     Falsy values (None, '', 0) become an empty dict; strings are treated as
     base64-encoded, zlib-compressed pickles (Python 2 `basestring` /
     `str.decode('zlib')` idioms); anything else is returned untouched.

     NOTE(review): pickle.loads on data read back from storage — confirm the
     column cannot carry attacker-controlled bytes.
     """
     if not value:
         return {}
     if isinstance(value, basestring):
         value = pickle.loads(base64.b64decode(value).decode('zlib'))
     return value
Ejemplo n.º 33
0
    logger = logging.getLogger('sentry.server')

    try:
        try:
            data = base64.b64decode(data).decode('zlib')
        except zlib.error:
            data = base64.b64decode(data)
    except Exception, e:
        # This error should be caught as it suggests that there's a
        # bug somewhere in the client's code.
        logger.exception('Bad data received')
        return HttpResponseForbidden('Bad data decoding request (%s, %s)' % (e.__class__.__name__, e))

    try:
        if format == 'pickle':
            data = pickle.loads(data)
        elif format == 'json':
            data = json.loads(data)
    except Exception, e:
        # This error should be caught as it suggests that there's a
        # bug somewhere in the client's code.
        logger.exception('Bad data received')
        fh = open('/tmp/bad_sentry_data.txt', 'a')
        print >>fh, 'Bad data reconstructing object (%s, %s)' % (e.__class__.__name__, e)
        print >>fh, data
        print >>fh, '#' * 80, '\n\n'
        fh.close()

        return HttpResponseForbidden('Bad data reconstructing object (%s, %s)' % (e.__class__.__name__, e))

    # XXX: ensure keys are coerced to strings