Пример #1
0
 def decode(self, session_data):
     """Unpickle a session payload, transparently decompressing when the
     leading flag byte marks it as compressed."""
     flag = ord(session_data[:1])
     payload = session_data[1:]
     if not (flag & FLAG_COMPRESSED):
         return pickle.loads(payload)
     if not conf['COMPRESS_LIB']:
         raise ValueError('redisession: found compressed data without COMPRESS_LIB specified.')
     return pickle.loads(compress_lib.decompress(payload))
Пример #2
0
    def get(self, key, default=None, version=None):
        """Return the cached value for *key* or *default*.

        Expired rows are deleted on access (lazy culling) via the write
        database alias.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)

        select_sql = ("SELECT cache_key, value, expires FROM %s "
                      "WHERE cache_key = %%s" % table)
        with connections[db].cursor() as cursor:
            cursor.execute(select_sql, [key])
            row = cursor.fetchone()
        if row is None:
            return default

        now = timezone.now()
        expires = row[2]
        if (connections[db].features.needs_datetime_string_cast
                and not isinstance(expires, datetime)):
            # Some 3rd-party backends hand back string timestamps; core
            # backends don't, so the test suite will NOT catch regressions
            # here.
            expires = typecast_timestamp(str(expires))

        if expires < now:
            # Stale entry: purge it on the write alias and report a miss.
            db = router.db_for_write(self.cache_model_class)
            with connections[db].cursor() as cursor:
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key = %%s" % table, [key])
            return default

        # Values are stored as base64-encoded pickles (possibly in a CLOB).
        value = connections[db].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))
Пример #3
0
    def test_send_all(self):
        """Queued notices go out only to users with an email NoticeSetting."""
        users = [self.user, self.user2, self.user3]

        email_id = get_backend_id("email")
        # Only the first two users opt in to email delivery.
        settings = [
            NoticeSetting.objects.create(
                user=u,
                notice_type=self.notice_type,
                medium=email_id,
                send=True,
            )
            for u in (self.user, self.user2)
        ]

        send(users, "label", queue=True)
        self.assertEqual(NoticeQueueBatch.objects.count(), 1)
        batch = NoticeQueueBatch.objects.all()[0]
        notices = pickle.loads(base64.b64decode(batch.pickled_data))
        self.assertEqual(len(notices), 3)

        send_all()
        self.assertEqual(len(mail.outbox), 2)
        self.assertIn(self.user.email, mail.outbox[0].to)
        self.assertIn(self.user2.email, mail.outbox[1].to)
        for message in mail.outbox[:2]:
            self.assertNotIn(self.user3.email, message.to)

        for setting in settings:
            setting.delete()
Пример #4
0
    def get(self, key, default=None, version=None):
        """Return the cached value for *key* or *default*.

        Expired rows are deleted on access (lazy culling) via the write
        database alias.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)

        select_sql = ("SELECT cache_key, value, expires FROM %s "
                      "WHERE cache_key = %%s" % table)
        with connections[db].cursor() as cursor:
            cursor.execute(select_sql, [key])
            row = cursor.fetchone()
        if row is None:
            return default

        now = timezone.now()
        expires = row[2]
        if (connections[db].features.needs_datetime_string_cast
                and not isinstance(expires, datetime)):
            # Some 3rd-party backends hand back string timestamps; core
            # backends don't, so the test suite will NOT catch regressions
            # here.
            expires = typecast_timestamp(str(expires))

        if expires < now:
            # Stale entry: purge it on the write alias and report a miss.
            db = router.db_for_write(self.cache_model_class)
            with connections[db].cursor() as cursor:
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key = %%s" % table, [key])
            return default

        # Values are stored as base64-encoded pickles (possibly in a CLOB).
        value = connections[db].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))
Пример #5
0
    def _value_from_db_model(self, value, field, field_kind, db_type):
        """
        Deconvert a stored EmbeddedModelField value.

        Storage may be a dict of (column, value) pairs, a flattened
        key/value list, or a pickled dict.  Returns the embedded model
        class together with a field.attname => value mapping suitable
        for instantiating it.
        """
        # Normalise the raw storage into a column => value dict.
        if db_type == 'list':
            keys, vals = value[::2], value[1::2]
            value = dict(zip(keys, vals))
        elif db_type in ('bytes', 'string'):
            value = pickle.loads(value)

        # Untyped fields store enough info to recover the model class.
        embedded_model = field.stored_model(value)

        # Map column names back to attribute names, deconverting each
        # subfield's value on the way.
        attrs = {}
        for subfield in embedded_model._meta.fields:
            if subfield.column in value:
                attrs[subfield.attname] = self._value_from_db(
                    value[subfield.column], *self._convert_as(subfield))
        return embedded_model, attrs
Пример #6
0
    def test_pickle(self):
        "Testing pickling and unpickling support."
        # Exercise both pickle and cPickle -- just 'cause.
        from django.utils.six.moves import cPickle
        import pickle

        # Build test geometries, setting an SRID on some of them.
        def get_geoms(lst, srid=None):
            return [GEOSGeometry(tg.wkt, srid) for tg in lst]

        tgeoms = get_geoms(self.geometries.points)
        for source, srid in ((self.geometries.multilinestrings, 4326),
                             (self.geometries.polygons, 3084),
                             (self.geometries.multipolygons, 900913)):
            tgeoms.extend(get_geoms(source, srid))

        # The SRID won't be exported in GEOS 3.0 release candidates.
        no_srid = self.null_srid == -1
        for geom in tgeoms:
            for serializer in (cPickle, pickle):
                clone = serializer.loads(serializer.dumps(geom))
                self.assertEqual(geom, clone)
                if not no_srid:
                    self.assertEqual(geom.srid, clone.srid)
Пример #7
0
    def test_pickle(self):
        "Testing pickling and unpickling support."
        # Using both pickle and cPickle -- just 'cause.
        from django.utils.six.moves import cPickle
        import pickle

        def get_geoms(lst, srid=None):
            return [GEOSGeometry(tg.wkt, srid) for tg in lst]

        # Test geometries; some carry an explicit SRID.
        tgeoms = get_geoms(self.geometries.points)
        tgeoms += get_geoms(self.geometries.multilinestrings, 4326)
        tgeoms += get_geoms(self.geometries.polygons, 3084)
        tgeoms += get_geoms(self.geometries.multipolygons, 3857)

        # The SRID won't be exported in GEOS 3.0 release candidates.
        no_srid = self.null_srid == -1
        for geom in tgeoms:
            roundtrips = (cPickle.loads(cPickle.dumps(geom)),
                          pickle.loads(pickle.dumps(geom)))
            for clone in roundtrips:
                self.assertEqual(geom, clone)
                if not no_srid:
                    self.assertEqual(geom.srid, clone.srid)
Пример #8
0
    def _value_from_db_model(self, value, field, field_kind, db_type):
        """
        Deconvert a stored EmbeddedModelField value.

        Storage may be a dict of (column, value) pairs, a flattened
        key/value list, or a pickled dict.  Returns the embedded model
        class together with a field.attname => value mapping suitable
        for instantiating it.
        """
        # Normalise the raw storage into a column => value dict.
        if db_type == 'list':
            keys, vals = value[::2], value[1::2]
            value = dict(zip(keys, vals))
        elif db_type in ('bytes', 'string'):
            value = pickle.loads(value)

        # Untyped fields store enough info to recover the model class.
        embedded_model = field.stored_model(value)

        # Map column names back to attribute names, deconverting each
        # subfield's value on the way.
        attrs = {}
        for subfield in embedded_model._meta.fields:
            if subfield.column in value:
                attrs[subfield.attname] = self._value_from_db(
                    value[subfield.column], *self._convert_as(subfield))
        return embedded_model, attrs
Пример #9
0
    def get(self, key, default=None, version=None):
        """Return the cached value for *key* or *default*.

        The raw ``expires`` column is run through the backend's converter
        chain so comparisons work on every database; expired rows are
        lazily deleted via the write alias.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute(
                "SELECT cache_key, value, expires FROM %s "
                "WHERE cache_key = %%s" % table, [key])
            row = cursor.fetchone()
        if row is None:
            return default

        # Normalise 'expires' into a datetime via the converter pipeline.
        expires = row[2]
        expression = models.Expression(output_field=models.DateTimeField())
        converters = (connection.ops.get_db_converters(expression)
                      + expression.get_db_converters(connection))
        for convert in converters:
            expires = convert(expires, expression, connection, {})

        if expires < timezone.now():
            # Stale: delete on the write alias and report a miss.
            db = router.db_for_write(self.cache_model_class)
            connection = connections[db]
            with connection.cursor() as cursor:
                cursor.execute(
                    "DELETE FROM %s WHERE cache_key = %%s" % table, [key])
            return default

        value = connection.ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))
 def test_queue(self):
     """Queueing two users produces one batch holding two notices."""
     recipients = [self.user, self.user2]
     queue(recipients, "label")
     self.assertEqual(EventQueueBatch.objects.count(), 1)
     batch = EventQueueBatch.objects.all()[0]
     queued = pickle.loads(base64.b64decode(batch.pickled_data))
     self.assertEqual(len(queued), 2)
Пример #11
0
 def from_db_value(self, value, expression, connection, context):
     """Decrypt and unpickle a stored value.

     Falsy values pass through unchanged; a lapsed TTL yields ``Expired``.
     """
     if not value:
         return value
     try:
         return pickle.loads(self._fernet.decrypt(value, self._ttl))
     except SignatureExpired:
         return Expired
Пример #12
0
 def test17_pickle(self):
     "Testing pickle support."
     # Round-trip an OGR geometry and check value plus SRS survive.
     original = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
     clone = pickle.loads(pickle.dumps(original))
     self.assertEqual(original, clone)
     self.assertEqual(4326, clone.srs.srid)
     self.assertEqual(original.srs.wkt, clone.srs.wkt)
Пример #13
0
 def test_pickle(self):
     "Testing pickle support."
     # Round-trip an OGR geometry and check value plus SRS survive.
     original = OGRGeometry("LINESTRING(1 1 1,2 2 2,3 3 3)", "WGS84")
     clone = pickle.loads(pickle.dumps(original))
     self.assertEqual(original, clone)
     self.assertEqual(4326, clone.srs.srid)
     self.assertEqual(original.srs.wkt, clone.srs.wkt)
Пример #14
0
    def get(self, key, default=None, version=None):
        """Return the cached value for *key* or *default*.

        The raw ``expires`` column is run through the backend's converter
        chain so comparisons work on every database; expired rows are
        lazily deleted via the write alias.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute(
                "SELECT cache_key, value, expires FROM %s "
                "WHERE cache_key = %%s" % table, [key])
            row = cursor.fetchone()
        if row is None:
            return default

        # Normalise 'expires' into a datetime via the converter pipeline.
        expires = row[2]
        expression = models.Expression(output_field=models.DateTimeField())
        converters = (connection.ops.get_db_converters(expression)
                      + expression.get_db_converters(connection))
        for convert in converters:
            expires = convert(expires, expression, connection, {})

        if expires < timezone.now():
            # Stale: delete on the write alias and report a miss.
            db = router.db_for_write(self.cache_model_class)
            connection = connections[db]
            with connection.cursor() as cursor:
                cursor.execute(
                    "DELETE FROM %s WHERE cache_key = %%s" % table, [key])
            return default

        value = connection.ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))
Пример #15
0
    def test_send_all(self):
        """Queued notices go out only to users with an email NoticeSetting."""
        users = [self.user, self.user2, self.user3]

        email_id = get_backend_id("email")
        # Only the first two users opt in to email delivery.
        settings = [
            NoticeSetting.objects.create(
                user=u,
                notice_type=self.notice_type,
                medium=email_id,
                send=True,
            )
            for u in (self.user, self.user2)
        ]

        send(users, "label", queue=True)
        self.assertEqual(NoticeQueueBatch.objects.count(), 1)
        batch = NoticeQueueBatch.objects.all()[0]
        notices = pickle.loads(base64.b64decode(batch.pickled_data))
        self.assertEqual(len(notices), 3)

        send_all()
        self.assertEqual(len(mail.outbox), 2)
        self.assertIn(self.user.email, mail.outbox[0].to)
        self.assertIn(self.user2.email, mail.outbox[1].to)
        for message in mail.outbox[:2]:
            self.assertNotIn(self.user3.email, message.to)

        for setting in settings:
            setting.delete()
Пример #16
0
 def _load(self, value):
     """Decrypt and unpickle *value*; return ``Expired`` when the TTL has
     lapsed."""
     try:
         plaintext = self._fernet.decrypt(value, self.ttl)
     except SignatureExpired:
         return Expired
     return pickle.loads(plaintext)
Пример #17
0
def send_all(*args):
    """Deliver every queued notice batch, then release the send lock.

    Emits the ``emitted_notices`` signal with delivery statistics and
    mails the admins on any unexpected error instead of crashing.
    """
    lock = acquire_lock(*args)
    batches = sent = sent_actual = 0
    start_time = time.time()

    try:
        # Nested try so the finally clause also works on Python 2.4.
        try:
            for queued_batch in NoticeQueueBatch.objects.all():
                notices = pickle.loads(
                    base64.b64decode(queued_batch.pickled_data))
                for user, label, extra_context, sender in notices:
                    try:
                        user = get_user_model().objects.get(pk=user)
                        logging.info(
                            "emitting notice {0} to {1}".format(label, user))
                        # One send_now() call per user keeps delivery atomic
                        # and the per-notice timing logs accurate.
                        if notification.send_now(
                                [user], label, extra_context, sender):
                            sent_actual += 1
                    except get_user_model().DoesNotExist:
                        # Deleted users are skipped with a warning.
                        logging.warning(
                            "not emitting notice {0} to user {1} since it does not exist".format(
                                label, user))
                    sent += 1
                queued_batch.delete()
                batches += 1
            emitted_notices.send(
                sender=NoticeQueueBatch,
                batches=batches,
                sent=sent,
                sent_actual=sent_actual,
                run_time="%.2f seconds" % (time.time() - start_time))
        except Exception:  # pylint: disable-msg=W0703
            _, e, _ = sys.exc_info()
            # Mail the admins about the failure and log it as critical.
            current_site = Site.objects.get_current()
            subject = "[{0} emit_notices] {1}".format(current_site.name, e)
            message = "\n".join(
                traceback.format_exception(
                    *sys.exc_info())  # pylint: disable-msg=W0142
            )
            mail_admins(subject, message, fail_silently=True)
            logging.critical("an exception occurred: {0}".format(e))
    finally:
        logging.debug("releasing lock...")
        lock.release()
        logging.debug("released.")

    logging.info("")
    logging.info("{0} batches, {1} sent".format(batches, sent))
    logging.info("done in {0:.2f} seconds".format(time.time() - start_time))
Пример #18
0
def send_all(*args):
    """Deliver every queued notice batch, then release the send lock.

    Emits the ``emitted_notices`` signal with delivery statistics and
    mails the admins on any unexpected error instead of crashing.
    """
    lock = acquire_lock(*args)
    batches = sent = sent_actual = 0
    start_time = time.time()

    try:
        # Nested try so the finally clause also works on Python 2.4.
        try:
            for queued_batch in NoticeQueueBatch.objects.all():
                notices = pickle.loads(
                    base64.b64decode(queued_batch.pickled_data))
                for user, label, extra_context, sender in notices:
                    try:
                        user = get_user_model().objects.get(pk=user)
                        logging.info(
                            "emitting notice {0} to {1}".format(label, user))
                        # One send_now() call per user keeps delivery atomic
                        # and the per-notice timing logs accurate.
                        if notification.send_now(
                                [user], label, extra_context, sender):
                            sent_actual += 1
                    except get_user_model().DoesNotExist:
                        # Deleted users are skipped with a warning.
                        logging.warning(
                            "not emitting notice {0} to user {1} since it does not exist".format(
                                label, user))
                    sent += 1
                queued_batch.delete()
                batches += 1
            emitted_notices.send(
                sender=NoticeQueueBatch,
                batches=batches,
                sent=sent,
                sent_actual=sent_actual,
                run_time="%.2f seconds" % (time.time() - start_time))
        except Exception:  # pylint: disable-msg=W0703
            _, e, _ = sys.exc_info()
            # Mail the admins about the failure and log it as critical.
            current_site = Site.objects.get_current()
            subject = "[{0} emit_notices] {1}".format(current_site.name, e)
            message = "\n".join(
                traceback.format_exception(
                    *sys.exc_info())  # pylint: disable-msg=W0142
            )
            mail_admins(subject, message, fail_silently=True)
            logging.critical("an exception occurred: {0}".format(e))
    finally:
        logging.debug("releasing lock...")
        lock.release()
        logging.debug("released.")

    logging.info("")
    logging.info("{0} batches, {1} sent".format(batches, sent))
    logging.info("done in {0:.2f} seconds".format(time.time() - start_time))
Пример #19
0
    def get(self,
            key,
            default=None,
            version=None,
            acquire_lock=True,
            timeout_local=DEFAULT_TIMEOUT):
        """
        get value. get --> local --> remote.

        If get remote failed, reuse local value.

        :param key: key
        :param default: returned when both the local and the remote value
            are None
        :param version: version
        :param acquire_lock: take the local cache's reader lock while
            peeking at the local entry
        :param timeout_local: default expiry for values (re)stored in the
            local cache
        :return: the cached value, or *default*
        """
        make_key = self.localc.make_key(key, version=version)
        self.localc.validate_key(make_key)
        value = None
        # Peek at the local entry (and its expiry) under the reader lock.
        with (self.localc._lock.reader() if acquire_lock else dummy()):
            pickled = self.localc._cache.get(make_key)
            expired = self.localc._has_expired(make_key)
        if pickled is not None:
            try:
                value = pickle.loads(pickled)
            except pickle.PickleError:
                # Corrupt local entry -- treat as a miss.
                value = None

        if (expired is False) and (value is not None):
            return value
        elif expired:
            self.localc.delete(key, version=version)

        # Local data unusable -> consult the remote cache.
        try:
            # Attempt to read from the remote cache.
            remote_value = self.remotec.get(key=key, version=version)
        except Exception as ex:
            # If the remote backend errors while we still hold a (stale)
            # local value, re-cache it locally for timeout_local so the
            # next read hits the local cache directly.
            self._printexc(ex)
            if value is not None:
                self.localc.set(key,
                                value,
                                timeout=timeout_local,
                                version=version)
        else:
            # Remote cache healthy: refresh the local cache from it.
            if (remote_value is not None) or self.remotec.has_key(
                    key, version=version):
                self.localc.set(key,
                                remote_value,
                                timeout=timeout_local,
                                version=version)
                value = remote_value
        return value or default
Пример #20
0
def test_storage_gone(pic1):
    """Replacing the image and saving removes the old file from storage."""
    prod = Product.objects.create(image=pic1['filename'])
    assert os.path.exists(pic1['path'])
    prod.image = random_pic()
    # A pickle round-trip must keep the storage bound to the file field.
    prod = pickle.loads(pickle.dumps(prod))
    assert hasattr(prod.image, 'storage')
    with transaction.atomic(get_using(prod)):
        prod.save()
    assert not os.path.exists(pic1['path'])
Пример #21
0
def test_storage_gone(pic1):
    """Reassigning the image name and saving deletes the old file."""
    prod = Product.objects.create(image=pic1['filename'])
    assert os.path.exists(pic1['path'])
    prod.image = 'new.jpg'
    # A pickle round-trip must keep the storage bound to the file field.
    prod = pickle.loads(pickle.dumps(prod))
    assert hasattr(prod.image, 'storage')
    with transaction.atomic(get_using(prod)):
        prod.save()
    assert not os.path.exists(pic1['path'])
Пример #22
0
 def read_all(self):
     """Load pickled results from disk and re-emit them as signals.

     Returns the deserialized list (empty when the file holds nothing).
     """
     with open(self.fpath, 'rb') as fh:
         raw = fh.read()
     if not raw:
         return []
     entries = pickle.loads(raw)
     for sender, results, context in entries:
         results_read.send(sender=sender, results=results, context=context)
     return entries
Пример #23
0
 def load(self, bytes):
     """
     Read a pickled object hierarchy from a bytes object and return the
     reconstituted object hierarchy specified therein; None when empty.
     """
     # NB: the parameter shadows the builtin `bytes`; kept for backward
     # compatibility with existing callers.
     if not bytes:
         return None
     return pickle.loads(force_bytes(bytes))
Пример #24
0
 def _load(self, value):
     """Decrypt and unpickle *value*.

     Returns the raw value when unencrypted data is allowed, and
     ``Expired`` for lapsed signatures.
     """
     try:
         return pickle.loads(self._fernet.decrypt(value, self.ttl))
     except BadSignature:
         # NOTE(review): if SignatureExpired subclasses BadSignature (as
         # in itsdangerous), this clause also catches expired tokens --
         # clause order deliberately preserved from the original.
         if not settings.ALLOW_UNENCRYPTED:
             raise
         return value
     except SignatureExpired:
         return Expired
Пример #25
0
 def get(self, key, default=None, version=None):
     """Return the value stored for *key*, or *default* on miss/expiry."""
     fname = self._key_to_file(key, version)
     try:
         with io.open(fname, 'rb') as f:
             if self._is_expired(f):
                 return default
             # Entries are zlib-compressed pickles.
             return pickle.loads(zlib.decompress(f.read()))
     except IOError as e:
         # A missing file is simply a cache miss.
         if e.errno != errno.ENOENT:
             raise
     return default
Пример #26
0
 def get(self, key, default=None, version=None):
     """Return the cached value for *key*, or *default* on any miss."""
     fname = self._key_to_file(key, version)
     if not os.path.exists(fname):
         return default
     try:
         with io.open(fname, 'rb') as f:
             if not self._is_expired(f):
                 # Entries are zlib-compressed pickles.
                 return pickle.loads(zlib.decompress(f.read()))
     except IOError as e:
         if e.errno == errno.ENOENT:
             pass  # Cache file was removed after the exists check
         # NOTE(review): other IOErrors are also silently treated as
         # misses here -- preserved from the original control flow.
     return default
Пример #27
0
 def get(self, key, default=None, version=None):
     """Return the cached value for *key*, or *default* on any miss."""
     fname = self._key_to_file(key, version)
     if not os.path.exists(fname):
         return default
     try:
         with io.open(fname, 'rb') as f:
             if not self._is_expired(f):
                 # Entries are zlib-compressed pickles.
                 return pickle.loads(zlib.decompress(f.read()))
     except IOError as e:
         if e.errno == errno.ENOENT:
             pass  # Cache file was removed after the exists check
         # NOTE(review): other IOErrors are also silently treated as
         # misses here -- preserved from the original control flow.
     return default
Пример #28
0
 def test_group_send(self):
     """Two queued batches produce grouped mail, one message per user."""
     members = [self.user, self.user2]
     send(members, "label", queue=True)
     send(members, "label", queue=True)
     self.assertEqual(len(mail.outbox), 0)
     self.assertEqual(NoticeQueueBatch.objects.count(), 2)
     send__all_grouped()
     self.assertEqual(len(mail.outbox), 2)
     batch = NoticeQueueBatch.objects.all()[0]
     grouped = pickle.loads(base64.b64decode(batch.pickled_data))
     self.assertEqual(len(grouped), 2)
Пример #29
0
 def test_group_send(self):
     """Two queued batches produce grouped mail, one message per user."""
     members = [self.user, self.user2]
     send(members, "label", queue=True)
     send(members, "label", queue=True)
     self.assertEqual(len(mail.outbox), 0)
     self.assertEqual(NoticeQueueBatch.objects.count(), 2)
     send__all_grouped()
     self.assertEqual(len(mail.outbox), 2)
     batch = NoticeQueueBatch.objects.all()[0]
     grouped = pickle.loads(base64.b64decode(batch.pickled_data))
     self.assertEqual(len(grouped), 2)
Пример #30
0
    def test_pickle(self):
        """A cache object survives a pickle round-trip with its data."""
        letters = 'abcde'
        cache.clear()

        for index, letter in enumerate(letters):
            cache.set(letter, index)

        clone = pickle.loads(pickle.dumps(cache))

        for letter in letters:
            self.assertEqual(clone.get(letter), cache.get(letter))
Пример #31
0
    def test_send(self):
        """send() rejects now+queue together and routes each mode correctly."""
        self.assertRaises(AssertionError, send, queue=True, now=True)

        recipients = [self.user, self.user2]

        # Immediate delivery: one message per user.
        send(recipients, "label", now=True)
        self.assertEqual(len(mail.outbox), 2)
        self.assertIn(self.user.email, mail.outbox[0].to)
        self.assertIn(self.user2.email, mail.outbox[1].to)

        # Queued delivery: a single batch holding both notices.
        send(recipients, "label", queue=True)
        self.assertEqual(NoticeQueueBatch.objects.count(), 1)
        batch = NoticeQueueBatch.objects.all()[0]
        queued = pickle.loads(base64.b64decode(batch.pickled_data))
        self.assertEqual(len(queued), 2)
Пример #32
0
    def result(self):
        """Return the 'task_results' payload of a finished Gearman job.

        Raises RuntimeError before execution and ValueError for empty or
        malformed job results.
        """
        if not self.complete:
            raise RuntimeError("Gearman task hasn't been executed yet")
        if not self.pending.result:
            raise ValueError("Unexpected empty result from Gearman job")

        job_result = cPickle.loads(self.pending.result)
        try:
            return job_result["task_results"]
        except KeyError:
            raise ValueError(
                "Expected a map containing 'task_results', but got: {!r}".format(
                    job_result))
Пример #33
0
 def decode(self, session_data):
     """Validate the HMAC over pickled session data and unpickle it.

     Any failure -- missing separator, hash mismatch, unpickling error --
     results in an empty session dict.
     """
     encoded_data = base64.b64decode(force_bytes(session_data))
     try:
         # split() raises ValueError when no b':' separator is present.
         digest, pickled = encoded_data.split(b':', 1)
         if not constant_time_compare(digest.decode(), self._hash(pickled)):
             raise SuspiciousOperation("Session data corrupted")
         return pickle.loads(pickled)
     except Exception:
         # ValueError, SuspiciousOperation, unpickling exceptions: any of
         # these means the session is unusable, so start empty.
         return {}
Пример #34
0
    def get(self, key, default=None, version=None):
        """Return the cached value for *key*, or *default* when missing or
        unreadable.

        NOTE(review): when the entry exists but has expired, control falls
        off the end and the method implicitly returns None -- preserved
        from the original flow; confirm callers expect this.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)

        # Fix: the original had untranslated annotations fused directly
        # onto these two code lines (no '#'), which made the block a
        # SyntaxError; they are restored here as real comments.
        with self._lock.reader():  # read lock
            exp = self._expire_info.get(key)

            if exp is None:
                return default
            elif exp > time.time():  # not yet expired
                try:
                    pickled = self._cache[key]
                    return pickle.loads(pickled)
                except pickle.PickleError:
                    return default
Пример #35
0
    def _value_from_db_collection(self, value, field, field_kind, db_type):
        """
        Recursively deconverts values for AbstractIterableFields.

        Assumes that all values in a collection can be deconverted
        using a single field (Field.item_field, possibly a RawField).

        Returns a value in a format proper for the field kind (the
        value will normally not go through to_python).
        """
        subfield, subkind, db_subtype = self._convert_as(field.item_field)

        # Unpickle (a dict) if a serialized storage is used.
        if db_type == 'bytes' or db_type == 'string':
            value = pickle.loads(value)

        if field_kind == 'DictField':

            # Generator yielding pairs with deconverted values, the
            # "list" db_type stores keys and values interleaved.
            if db_type == 'list':
                value = zip(value[::2], value[1::2])
            else:
                # Fix: was value.iteritems(), which raises AttributeError
                # on Python 3; items() behaves the same on both versions
                # (the sibling implementation uses six.iteritems here).
                value = value.items()

            # DictField needs to hold a dict.
            return dict(
                (key, self._value_from_db(subvalue, subfield,
                                          subkind, db_subtype))
                for key, subvalue in value)
        else:

            # Generator yielding deconverted items.
            value = (
                self._value_from_db(subvalue, subfield,
                                    subkind, db_subtype)
                for subvalue in value)

            # The value will be available from the field without any
            # further processing and it has to have the right type.
            if field_kind == 'ListField':
                return list(value)
            elif field_kind == 'SetField':
                return set(value)

            # A new field kind? Maybe it can take a generator.
            return value
Пример #36
0
    def _value_from_db_collection(self, value, field, field_kind, db_type):
        """
        Recursively deconvert values stored for AbstractIterableFields.

        All items are assumed deconvertible through the single
        Field.item_field (possibly a RawField).  The result has the shape
        the field kind expects and normally bypasses to_python.
        """
        subfield, subkind, db_subtype = self._convert_as(field.item_field)

        # Serialized storages hold a pickled blob.
        if db_type in ('bytes', 'string'):
            value = pickle.loads(value)

        if field_kind != 'DictField':
            # Lazily deconvert each item.
            items = (self._value_from_db(subvalue, subfield, subkind,
                                         db_subtype) for subvalue in value)
            # ListField/SetField must hand back a concrete container; any
            # other kind may be able to consume the generator directly.
            if field_kind == 'ListField':
                return list(items)
            if field_kind == 'SetField':
                return set(items)
            return items

        # The "list" db_type stores keys and values interleaved.
        if db_type == 'list':
            pairs = zip(value[::2], value[1::2])
        else:
            pairs = six.iteritems(value)

        # DictField needs to hold a dict.
        return dict(
            (key, self._value_from_db(subvalue, subfield, subkind,
                                      db_subtype))
            for key, subvalue in pairs)
Пример #37
0
Файл: locmem.py Проект: 10sr/hue
 def get(self, key, default=None, version=None):
     """Return the cached value for *key*, or *default*.

     A live entry is read under the reader lock; a dead (or absent)
     entry is purged under the writer lock before reporting a miss.
     """
     key = self.make_key(key, version=version)
     self.validate_key(key)
     with self._lock.reader():
         expiry = self._expire_info.get(key, 0)
         # expiry of None means "never expires".
         if expiry is None or expiry > time.time():
             try:
                 return pickle.loads(self._cache[key])
             except pickle.PickleError:
                 return default
     with self._lock.writer():
         try:
             del self._cache[key]
             del self._expire_info[key]
         except KeyError:
             pass
         return default
Пример #38
0
    def decode(self, value, value_type):
        """Convert a stored value blob back into a Python object.

        ``value_type`` is the one-char code written alongside the blob:
        'i' = integer stored as text, 'p' = pickle, 'z' = zlib-compressed
        pickle.  Raises ValueError for any other code.
        """
        if value_type == 'i':
            return int(value)
        if value_type == 'z':
            # A compressed blob is just a pickle behind zlib.
            return pickle.loads(force_bytes(zlib.decompress(value)))
        if value_type == 'p':
            return pickle.loads(force_bytes(value))
        raise ValueError(
            "Unknown value_type '{}' read from the cache table.".format(
                value_type))
Пример #39
0
 def decode(self, session_data):
     """Decode a signed, base64-encoded session payload.

     The payload layout is ``b64(hash + b':' + pickled_data)``.  Any
     failure -- missing separator, hash mismatch, unpickling error --
     yields an empty session dict; suspicious operations are logged to
     the matching django.security.* logger first.
     """
     raw = base64.b64decode(force_bytes(session_data))
     try:
         # split() raises ValueError when the b':' separator is missing.
         digest, payload = raw.split(b':', 1)
         if not constant_time_compare(digest.decode(), self._hash(payload)):
             raise SuspiciousSession("Session data corrupted")
         return pickle.loads(payload)
     except Exception as e:
         # ValueError, SuspiciousOperation, unpickling exceptions: fall
         # back to an empty session rather than crashing the request.
         if isinstance(e, SuspiciousOperation):
             logging.getLogger('django.security.%s' %
                               e.__class__.__name__).warning(force_text(e))
         return {}
Пример #40
0
 def get(self, key, default=None, version=None):
     """Fetch ``key`` from the in-memory cache, or ``default`` on a miss.

     Entries whose expiry is ``None`` never expire.  Fresh entries are
     unpickled under the reader lock; stale or absent keys are purged
     under the writer lock and ``default`` is returned.
     """
     key = self.make_key(key, version=version)
     self.validate_key(key)
     with self._lock.reader():
         exp = self._expire_info.get(key, 0)
         fresh = exp is None or exp > time.time()
         if fresh:
             payload = self._cache[key]
             try:
                 return pickle.loads(payload)
             except pickle.PickleError:
                 return default
     with self._lock.writer():
         # Evict the stale entry before reporting the miss.
         try:
             del self._cache[key]
             del self._expire_info[key]
         except KeyError:
             pass
         return default
Пример #41
0
    def decode(self, value, value_type):
        """Turn a stored blob plus its one-char type code back into an object.

        'i' stores an integer as text, 'p' a pickle, 'z' a zlib-compressed
        pickle.  Anything else is a corrupt table row -> ValueError.
        """
        if value_type == 'z':
            # Decompress, then fall through to the plain-pickle path.
            value = zlib.decompress(value)
            value_type = 'p'

        if value_type == 'p':
            return pickle.loads(force_bytes(value))
        if value_type == 'i':
            return int(value)

        raise ValueError(
            "Unknown value_type '{}' read from the cache table."
            .format(value_type)
        )
Пример #42
0
    def get(self, key, default=None, version=None):
        """Read ``key`` from the database cache table, or ``default``.

        A row whose ``expires`` timestamp has passed is deleted through the
        write database alias (committed unless a transaction is being
        managed) before the miss is reported.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        read_db = router.db_for_read(self.cache_model_class)
        table = connections[read_db].ops.quote_name(self._table)
        cursor = connections[read_db].cursor()

        cursor.execute("SELECT cache_key, value, expires FROM %s " "WHERE cache_key = %%s" % table, [key])
        row = cursor.fetchone()
        if row is None:
            return default

        if row[2] < timezone.now():
            # Stale row: purge it on the write alias and report a miss.
            write_db = router.db_for_write(self.cache_model_class)
            cursor = connections[write_db].cursor()
            cursor.execute("DELETE FROM %s " "WHERE cache_key = %%s" % table, [key])
            transaction.commit_unless_managed(using=write_db)
            return default

        blob = connections[read_db].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(blob)))
Пример #43
0
    def get(self, key, default=None, version=None, acquire_lock=True):
        """Return the unpickled value for ``key``, or ``default`` on a miss.

        With ``acquire_lock=False`` the caller is assumed to already hold
        the lock, so a no-op ``dummy()`` context is substituted.  Expired
        or absent keys are purged before ``default`` is returned.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        blob = None
        reader = self._lock.reader() if acquire_lock else dummy()
        with reader:
            if not self._has_expired(key):
                blob = self._cache[key]
        if blob is not None:
            try:
                return pickle.loads(blob)
            except pickle.PickleError:
                return default

        writer = self._lock.writer() if acquire_lock else dummy()
        with writer:
            # Best-effort eviction of the dead entry.
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return default
Пример #44
0
    def get(self, key, default=None, version=None, acquire_lock=True):
        """Look up ``key``; evict and miss when absent, expired, or corrupt.

        ``acquire_lock=False`` swaps the real reader/writer locks for a
        no-op ``dummy()`` context (the caller already holds the lock).
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        payload = None
        with (self._lock.reader() if acquire_lock else dummy()):
            expired = self._has_expired(key)
            if not expired:
                payload = self._cache[key]

        if payload is not None:
            try:
                return pickle.loads(payload)
            except pickle.PickleError:
                # Corrupt entry: treat as a miss without evicting here.
                return default

        with (self._lock.writer() if acquire_lock else dummy()):
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return default
Пример #45
0
def pickle_loads(pickled_str):
    """Return the unpickled data from a pickle payload.

    Args:
        pickled_str (bytes):
            The pickled data.  A text string is first encoded as latin1
            before unpickling.

    Returns:
        object:
        The unpickled data.
    """
    data = pickled_str
    if isinstance(data, six.text_type):
        data = data.encode('latin1')

    try:
        return pickle.loads(data)
    except AttributeError:
        # A name referenced by the pickle no longer resolves.  Retry with
        # the compatibility unpickler, which remaps old paths (losing the
        # cPickle speed-up in exchange).
        return DjangoCompatUnpickler(io.BytesIO(data)).load()
Пример #46
0
    def get(self, key, default=None, version=None):
        """Fetch ``key`` from the database cache table, or ``default``.

        Rows past their ``expires`` timestamp are deleted via the write
        alias (committed unless a transaction is managed) before the miss
        is reported.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        alias = router.db_for_read(self.cache_model_class)
        table = connections[alias].ops.quote_name(self._table)

        cur = connections[alias].cursor()
        cur.execute("SELECT cache_key, value, expires FROM %s "
                    "WHERE cache_key = %%s" % table, [key])
        row = cur.fetchone()
        if row is None:
            return default

        if row[2] < timezone.now():
            # Expired entry: clean the row up and treat the lookup as a miss.
            alias = router.db_for_write(self.cache_model_class)
            cur = connections[alias].cursor()
            cur.execute("DELETE FROM %s "
                        "WHERE cache_key = %%s" % table, [key])
            transaction.commit_unless_managed(using=alias)
            return default

        clob = connections[alias].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(clob)))
Пример #47
0
    def get(self, key, default=None, version=None, acquire_lock=True, timeout_local=DEFAULT_TIMEOUT):
        """
        get value. get --> local --> remote.

        If get remote failed, reuse local value.

        :param key: key
        :param default: returned when both the local and the remote cache yield None
        :param version: version
        :param timeout_local: default expiry used when re-caching a value into the local cache
        :return:
        """
        # Consult the local in-memory cache first.
        make_key = self.localc.make_key(key, version=version)
        self.localc.validate_key(make_key)
        value = None
        with (self.localc._lock.reader() if acquire_lock else dummy()):
            pickled = self.localc._cache.get(make_key)
            expired = self.localc._has_expired(make_key)
        if pickled is not None:
            try:
                value = pickle.loads(pickled)
            except pickle.PickleError:
                # Corrupt local payload: treat it as a miss.
                value = None

        if (expired is False) and (value is not None):
            return value
        elif expired:
            self.localc.delete(key, version=version)

        # Local data unusable -- fall back to the remote cache.
        try:
            # Attempt to read from the remote cache.
            remote_value = self.remotec.get(key=key, version=version)
        except Exception, ex:
            # If the remote backend fails and the (possibly stale) local
            # value is not None, re-cache that value locally for
            # timeout_local seconds so the next lookup hits local directly.
            self._printexc(ex)
            if value is not None:
                self.localc.set(key, value, timeout=timeout_local, version=version)
        # NOTE(review): Python 2 syntax (`except Exception, ex`); the snippet
        # also appears truncated -- remote_value is never returned in the
        # visible code.  Confirm against the original source.
Пример #48
0
 def loads(self, data):
     """Deserialize a pickled byte string back into a Python object."""
     obj = pickle.loads(data)
     return obj
Пример #49
0
 def loads(self, data):
     """Unpickle ``data`` and return the resulting object."""
     return pickle.loads(data)
Пример #50
0
 def _load(self, value):
     return pickle.loads(value)
Пример #51
0
"File-based cache backend"
Пример #52
0
        now = timezone.now()

        row[2] is the expiry timestamp
        if row[2] < now: the entry has already expired
            db = router.db_for_write(self.cache_model_class)
            cursor = connections[db].cursor()

            cursor.execute("DELETE FROM %s "
                           "WHERE cache_key = %%s" % table, [key])

            transaction.commit_unless_managed(using=db) commits the transaction unless one is already being managed

            return default

        value = connections[db].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value))) base64-decode, then unpickle

    def set(self, key, value, timeout=None, version=None):
        """Store ``value`` under ``key`` by delegating to ``_base_set('set', ...)``."""
        full_key = self.make_key(key, version=version)
        self.validate_key(full_key)
        self._base_set('set', full_key, value, timeout)

    def add(self, key, value, timeout=None, version=None):
        """Delegate to ``_base_set('add', ...)`` and return its result.

        Presumably ``_base_set`` only inserts when the key is absent in
        'add' mode -- confirm in ``_base_set``.
        """
        full_key = self.make_key(key, version=version)
        self.validate_key(full_key)
        return self._base_set('add', full_key, value, timeout)

    def _base_set(self, mode, key, value, timeout=None):

        if timeout is None: timeout is the expiry interval -- once it elapses, the entry no longer takes effect
            timeout = self.default_timeout
Пример #53
0
"Thread-safe in-memory cache backend."