    def test_compression_cache(self):
        '''Test a two-level cache where the upstream compresses files'''
        from databundles.cache.filesystem import FsCache, FsCompressionCache

        root = self.rc.group('filesystem').root_dir

        l1_repo_dir = os.path.join(root, 'comp-repo-l1')
        os.makedirs(l1_repo_dir)

        l2_repo_dir = os.path.join(root, 'comp-repo-l2')
        os.makedirs(l2_repo_dir)

        testfile = os.path.join(root, 'testfile')

        with open(testfile, 'w+') as f:
            for i in range(1024):  #@UnusedVariable
                f.write('.' * 1023)
                f.write('\n')

        # Create a cache with an upstream wrapped in compression
        l3 = FsCache(l2_repo_dir)
        l2 = FsCompressionCache(l3)
        l1 = FsCache(l1_repo_dir, upstream=l2)

        f1 = l1.put(testfile, 'tf1')
        self.assertTrue(os.path.exists(f1))

        l1.remove('tf1', propagate=False)
        self.assertFalse(os.path.exists(f1))

        f1 = l1.get('tf1')
        self.assertIsNotNone(f1)
        self.assertTrue(os.path.exists(f1))
    def test_cache(self):
        from databundles.cache.filesystem import FsCache, FsLimitedCache

        root = self.rc.group('filesystem').root_dir

        l1_repo_dir = os.path.join(root, 'repo-l1')
        os.makedirs(l1_repo_dir)

        l2_repo_dir = os.path.join(root, 'repo-l2')
        os.makedirs(l2_repo_dir)

        testfile = os.path.join(root, 'testfile')

        with open(testfile, 'w+') as f:
            for i in range(1024):
                f.write('.' * 1023)
                f.write('\n')

        #
        # Basic operations on a cache with no upstream
        #
        l2 = FsCache(l2_repo_dir)

        p = l2.put(testfile, 'tf1')
        l2.put(testfile, 'tf2')
        g = l2.get('tf1')

        self.assertTrue(os.path.exists(p))
        self.assertTrue(os.path.exists(g))
        self.assertEqual(p, g)

        self.assertIsNone(l2.get('foobar'))

        l2.remove('tf1')
        self.assertIsNone(l2.get('tf1'))

        #
        # Now create the cache with an upstream, the first
        # cache we created
        #
        l1 = FsLimitedCache(l1_repo_dir, upstream=l2, size=5)

        print l1
        print l2

        g = l1.get('tf2')
        self.assertTrue(g is not None)

        # Put to one and check in the other.
        l1.put(testfile, 'write-through')
        self.assertIsNotNone(l2.get('write-through'))

        l1.remove('write-through', propagate=True)
        self.assertIsNone(l2.get('write-through'))

        # Put a bunch of files in, and check that l2 gets all of the files,
        # but the size of l1 stays constrained. Each test file is
        # 1024 * 1024 = 1048576 bytes, so four copies (4194304 bytes) fit
        # under the l1 size limit.
        for i in range(0, 10):
            l1.put(testfile, 'many' + str(i))

        self.assertEquals(4194304, l1.size)

        # Check that the right files got deleted
        self.assertFalse(os.path.exists(os.path.join(l1.cache_dir, 'many1')))
        self.assertFalse(os.path.exists(os.path.join(l1.cache_dir, 'many5')))
        self.assertTrue(os.path.exists(os.path.join(l1.cache_dir, 'many6')))

        # Fetch a file that was displaced, to check that it gets loaded back
        # into the cache.
        p = l1.get('many1')
        p = l1.get('many2')
        self.assertTrue(p is not None)
        self.assertTrue(os.path.exists(os.path.join(l1.cache_dir, 'many1')))

        # Reloading many1 should have evicted many6
        self.assertFalse(os.path.exists(os.path.join(l1.cache_dir, 'many6')))
        self.assertTrue(os.path.exists(os.path.join(l1.cache_dir, 'many7')))

        #
        # Check that verification works
        #
        l1.verify()

        os.remove(os.path.join(l1.cache_dir, 'many8'))

        with self.assertRaises(Exception):
            l1.verify()

        l1.remove('many8')
        l1.verify()

        c = l1.database.cursor()
        c.execute("DELETE FROM files WHERE path = ?", ('many9',))
        l1.database.commit()

        with self.assertRaises(Exception):
            l1.verify()

        l1.remove('many9')
        l1.verify()