Example #1
    def _get(self, key):
        # Fetch every possible chunk key for this value and drop the misses.
        to_get = ['%s.%s' % (key, i) for i in range_type(self.maxchunk)]
        result = super(SpreadSASLMemcachedCache, self).get_many(*to_get)
        # Reassemble the pickled payload; ''.join assumes Python 2 str chunks
        # (Example #2 below joins with b'' for Python 3).
        serialized = ''.join([v for v in result if v is not None])
        if not serialized:
            return None
        return pickle.loads(serialized)
Example #2
    def _get(self, key):
        to_get = ["%s.%s" % (key, i) for i in range_type(self.maxchunk)]
        result = super(SpreadSASLMemcachedCache, self).get_many(*to_get)

        if PY2:
            serialized = "".join(v for v in result if v is not None)
        else:
            serialized = b"".join(v for v in result if v is not None)

        if not serialized:
            return None

        return pickle.loads(serialized)
Example #3
    def _set(self, key, value, timeout=None):
        # Pickling/unpickling adds overhead. I didn't find a good way to
        # avoid it when the value is smaller than chunksize, because in the
        # case of <werkzeug.requests> getting the length consumes the data
        # iterator.
        serialized = pickle.dumps(value, 2)
        values = {}
        len_ser = len(serialized)
        chks = range_type(0, len_ser, self.chunksize)

        if len(chks) > self.maxchunk:
            raise ValueError("Cannot store value in less than %s keys" % self.maxchunk)

        # Store each chunksize-byte slice under its own key: key.0, key.1, ...
        for i in chks:
            values["%s.%s" % (key, i // self.chunksize)] = serialized[
                i : i + self.chunksize
            ]

        super(SpreadSASLMemcachedCache, self).set_many(values, timeout)
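The chunk-key arithmetic above is easy to check by hand: each slice of chunksize bytes is stored under key.<chunk index>, and i // self.chunksize turns the byte offset into that index. A small standalone illustration (the key name, payload length, and chunksize below are made up for the example):

    serialized = b"x" * 2500   # stand-in for a pickled payload
    chunksize = 1000

    chunks = {
        "mykey.%s" % (i // chunksize): serialized[i:i + chunksize]
        for i in range(0, len(serialized), chunksize)
    }

    print(sorted(chunks))                     # ['mykey.0', 'mykey.1', 'mykey.2']
    print([len(v) for v in chunks.values()])  # [1000, 1000, 500]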
Example #4
    def _genkeys(self, key):
        # Every chunk key a spread value could occupy: key.0 .. key.(maxchunk-1).
        return ["%s.%s" % (key, i) for i in range_type(self.maxchunk)]