Exemplo n.º 1
0
    def test_cache_5(self):
        """Store an entry with an explicit timestamp and verify that
        timestamp-aware lookups behave correctly."""
        content = self.content * 10
        timestamp = '20041110001122'

        self._cleanup(self.key)
        rhnCache.set(self.key, content, modified=timestamp)

        # The key must be visible with no timestamp, visible with the exact
        # timestamp, and invisible for a non-matching timestamp.
        # (failUnless/failIf are deprecated aliases, removed in Python 3.12.)
        self.assertTrue(rhnCache.has_key(self.key))
        self.assertTrue(rhnCache.has_key(self.key, modified=timestamp))
        self.assertFalse(rhnCache.has_key(self.key, modified='20001122112233'))
        self._cleanup(self.key)
Exemplo n.º 2
0
    def test_cache_5(self):
        """Store an entry with an explicit timestamp and verify that
        timestamp-aware lookups behave correctly."""
        content = self.content * 10
        timestamp = '20041110001122'

        self._cleanup(self.key)
        rhnCache.set(self.key, content, modified=timestamp)

        # The key must be visible with no timestamp, visible with the exact
        # timestamp, and invisible for a non-matching timestamp.
        # (failUnless/failIf are deprecated aliases, removed in Python 3.12.)
        self.assertTrue(rhnCache.has_key(self.key))
        self.assertTrue(rhnCache.has_key(self.key, modified=timestamp))
        self.assertFalse(rhnCache.has_key(self.key, modified='20001122112233'))
        self._cleanup(self.key)
Exemplo n.º 3
0
    def _test(self, key, content, **modifiers):
        """Round-trip *content* through the cache under *key*, passing
        *modifiers* (e.g. raw=1, compressed=1) to both set and get.

        Returns the (key, content) pair so callers can chain checks."""
        # Blow it away
        self._cleanup(key)
        # apply() was removed in Python 3; use **-unpacking instead.
        rhnCache.set(key, content, **modifiers)
        self.assertTrue(rhnCache.has_key(key))
        content2 = rhnCache.get(key, **modifiers)
        self.assertEqual(content, content2)

        # Deleting the key must make it invisible again.
        self._cleanup(key)
        self.assertFalse(rhnCache.has_key(key))
        return (key, content)
Exemplo n.º 4
0
    def test_cache_5(self):
        """Timestamp-aware lookups: wildcard hit, exact-stamp hit, mismatch miss."""
        payload = self.content * 10
        stamp = '20041110001122'

        self._cleanup(self.key)
        rhnCache.set(self.key, payload, modified=stamp)

        # Present without a timestamp, present for the stored timestamp,
        # absent for a different timestamp.
        self.assertTrue(rhnCache.has_key(self.key))
        self.assertTrue(rhnCache.has_key(self.key, modified=stamp))
        self.assertFalse(rhnCache.has_key(self.key, modified='20001122112233'))
        self._cleanup(self.key)
Exemplo n.º 5
0
    def _test(self, key, content, **modifiers):
        """Round-trip *content* through the cache under *key*, passing
        *modifiers* to both set and get; returns (key, content)."""
        # Blow it away
        rhnCache.CACHEDIR = '/tmp/rhn'
        self._cleanup(key)
        rhnCache.set(key, content, **modifiers)
        # failUnless/failIf are deprecated aliases, removed in Python 3.12.
        self.assertTrue(rhnCache.has_key(key))
        content2 = rhnCache.get(key, **modifiers)
        self.assertEqual(content, content2)

        self._cleanup(key)
        self.assertFalse(rhnCache.has_key(key))
        return (key, content)
Exemplo n.º 6
0
    def _test(self, key, content, **modifiers):
        """Round-trip *content* through the cache under *key*, passing
        *modifiers* to both set and get; returns (key, content)."""
        # Blow it away
        rhnCache.CACHEDIR = '/tmp/rhn'
        self._cleanup(key)
        rhnCache.set(key, content, **modifiers)
        # failUnless/failIf are deprecated aliases, removed in Python 3.12.
        self.assertTrue(rhnCache.has_key(key))
        content2 = rhnCache.get(key, **modifiers)
        self.assertEqual(content, content2)

        self._cleanup(key)
        self.assertFalse(rhnCache.has_key(key))
        return (key, content)
Exemplo n.º 7
0
def get(name, modified=None, raw=None, compressed=None):
    """Fetch *name* from a two-level cache: disk (via rhnCache) backed by the
    database.

    :param name: cache key.
    :param modified: optional version timestamp; when given, only an entry
        with that exact version is returned.
    :param raw: when true, return the stored bytes without unpickling.
    :param compressed: when true, the database value is gzip-compressed and
        is decompressed before use.
    :return: the cached value, or None when the key is missing, the version
        differs, or the stored data is corrupt.
    """
    # Check to see if the entry is in the database, with the right version
    h = _fetch_cursor(key=name, modified=modified)

    row = h.fetchone_dict()

    if not row:
        # Key not found
        return None

    if modified and row['delta'] != 0:
        # Different version
        log_debug(4, "database cache: different version")
        return None

    if modified is None:
        # The caller doesn't care about the modified time, but we do, since we
        # want to fetch the same version from the disk cache
        modified = row['modified']

    if rhnCache.has_key(name, modified):
        # We have the value
        log_debug(4, "Filesystem cache HIT")
        return rhnCache.get(name, modified=modified, raw=raw)

    log_debug(4, "Filesystem cache MISS")

    # The disk cache doesn't have this key at all, or it's a modified value
    # Fetch the value from the database

    v = row['value']
    # Update the accessed field
    rhnSQL.Procedure("rhn_cache_update_accessed")(name)

    if compressed:
        # Wrap the LOB bytes in a seekable buffer so GzipFile can read them.
        io = cStringIO.StringIO()

        io.write(rhnSQL.read_lob(v))
        io.seek(0, 0)

        # XXX For about 40M of compressed data sometimes we get:
        # zlib.error: Error -3 while decompressing: incomplete dynamic bit lengths tree
        v = gzip.GzipFile(None, "r", 0, io)

    try:
        # v is either the raw LOB handle or the GzipFile wrapper; both expose
        # read().
        data = v.read()
    except (ValueError, IOError, gzip.zlib.error) as e:
        # XXX poking at gzip.zlib may not be that well-advised
        log_error("rhnDatabaseCache: gzip error for key %s: %s" % (name, e))
        # Ignore this entry in the database cache, it has invalid data
        return None

    # We store the data in the database cache, in raw format
    rhnCache.set(name, data, modified=modified, raw=1)

    # Unpickle the data, unless raw access was requested
    if not raw:
        return cPickle.loads(data)

    return data
Exemplo n.º 8
0
def get(name, modified=None, raw=None, compressed=None):
    """Fetch *name* from a two-level cache: disk (via rhnCache) backed by the
    database.

    :param name: cache key.
    :param modified: optional version timestamp; when given, only an entry
        with that exact version is returned.
    :param raw: when true, return the stored bytes without unpickling.
    :param compressed: when true, the database value is gzip-compressed and
        is decompressed before use.
    :return: the cached value, or None when the key is missing, the version
        differs, or the stored data is corrupt.
    """
    # Check to see if the entry is in the database, with the right version
    h = _fetch_cursor(key=name, modified=modified)

    row = h.fetchone_dict()

    if not row:
        # Key not found
        return None

    if modified and row['delta'] != 0:
        # Different version
        log_debug(4, "database cache: different version")
        return None

    if modified is None:
        # The caller doesn't care about the modified time, but we do, since we
        # want to fetch the same version from the disk cache
        modified = row['modified']

    if rhnCache.has_key(name, modified):
        # We have the value
        log_debug(4, "Filesystem cache HIT")
        return rhnCache.get(name, modified=modified, raw=raw)

    log_debug(4, "Filesystem cache MISS")

    # The disk cache doesn't have this key at all, or it's a modified value
    # Fetch the value from the database

    v = row['value']
    # Update the accessed field
    rhnSQL.Procedure("rhn_cache_update_accessed")(name)

    if compressed:
        io = cStringIO.StringIO()

        io.write(rhnSQL.read_lob(v))
        io.seek(0, 0)

        # XXX For about 40M of compressed data sometimes we get:
        # zlib.error: Error -3 while decompressing: incomplete dynamic bit lengths tree
        v = gzip.GzipFile(None, "r", 0, io)

    try:
        data = v.read()
    # "except E, e" is Python 2-only syntax; use "as" for Python 3.
    except (ValueError, IOError, gzip.zlib.error) as e:
        # XXX poking at gzip.zlib may not be that well-advised
        log_error("rhnDatabaseCache: gzip error for key %s: %s" % (
            name, e))
        # Ignore this entry in the database cache, it has invalid data
        return None

    # Populate the disk cache in raw format so the next call hits it; without
    # this tail the success path would implicitly return None.
    rhnCache.set(name, data, modified=modified, raw=1)

    # Unpickle the data, unless raw access was requested
    if not raw:
        return cPickle.loads(data)

    return data
Exemplo n.º 9
0
    def no_test_as_streams_1(self):
        """Tests storing and retrieval as streams."""
        t = tempfile.TemporaryFile()
        content = self.content * 100
        t.write(content)
        t.seek(0, 0)

        self._cleanup(self.key)
        # Store from a seekable stream instead of an in-memory value.
        rhnCache.set(self.key, None, raw=1, stream=t)
        # failUnless/assertEquals are deprecated aliases, removed in 3.12.
        self.assertTrue(rhnCache.has_key(self.key))

        ss = rhnCache.get(self.key, as_stream=1)
        # The retrieved object must itself be a readable stream.
        self.assertTrue(hasattr(ss, "read"))
        content2 = ss.read()

        self.assertEqual(content, content2)
        self._cleanup(self.key)
Exemplo n.º 10
0
    def _cleanup(self, key):
        """Delete *key* from the cache if present, then assert it is gone."""
        if rhnCache.has_key(key):
            rhnCache.delete(key)

        # failIf is a deprecated alias, removed in Python 3.12.
        self.assertFalse(rhnCache.has_key(key))
Exemplo n.º 11
0
 def cache_has_key(self, object_id, timestamp=None):
     """Return whether the cache holds *object_id* (optionally at the
     exact *timestamp* version)."""
     return rhnCache.has_key(self._get_key(object_id), modified=timestamp)
Exemplo n.º 12
0
 def has_key(self, key):
     """True when an entry exists under the computed form of *key*."""
     return rhnCache.has_key(self._compute_key(key))
Exemplo n.º 13
0
    def _cleanup(self, key):
        """Delete *key* from the cache if present, then assert it is gone."""
        if rhnCache.has_key(key):
            rhnCache.delete(key)

        # failIf is a deprecated alias, removed in Python 3.12.
        self.assertFalse(rhnCache.has_key(key))
Exemplo n.º 14
0
 def cache_has_key(self, object_id, timestamp=None):
     """Return whether the cache holds *object_id* (optionally at the
     exact *timestamp* version)."""
     return rhnCache.has_key(self._get_key(object_id), modified=timestamp)
Exemplo n.º 15
0
    def _cleanup(self, key):
        """Remove *key* if it is cached, then verify the removal stuck."""
        if rhnCache.has_key(key):
            rhnCache.delete(key)

        self.assertFalse(rhnCache.has_key(key))