def test_get_inode(self):
    open('test', 'w').write('hello')
    cache = pdk.cache.Cache(os.path.join(self.work_dir, 'cache'))
    cache.import_file(FileLocator('', 'test', None, None, None),
                      NullMassProgress())
    expected_ids = ('sha-1:aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d',
                    'md5:5d41402abc4b2a76b9719d911017c592')
    inodes = Set([cache.get_inode(i) for i in expected_ids])
    self.assert_equals(1, len(inodes))
def test_cache_containment(self):
    """id in cache returns whether or not a file exists in the cache"""
    open('test', 'w').write('hello')
    cache = pdk.cache.Cache(os.path.join(self.work_dir, 'cache'))
    cache.import_file(FileLocator('', 'test', None, None, None),
                      NullMassProgress())
    expected_ids = ('sha-1:aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d',
                    'md5:5d41402abc4b2a76b9719d911017c592')
    for expected_id in expected_ids:
        assert expected_id in cache
def test_acquire_file(self):
    """acquire file downloads the contents of the file_object.

    The contents should end up under the given temporary id. The
    return value should be new blob_ids.
    """
    open('test', 'w').write('hello')
    cache = pdk.cache.Cache(os.path.join(self.work_dir, 'cache'))
    cache.import_file(FileLocator('', 'test', None, None, None),
                      NullMassProgress())
    expected_ids = ('sha-1:aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d',
                    'md5:5d41402abc4b2a76b9719d911017c592')
    for expected_id in expected_ids:
        assert os.path.exists(cache.file_path(expected_id))
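# A side note on the expected_ids fixtures used above: they are simply the
# sha-1 and md5 digests of the file contents ('hello'), prefixed with the
# digest scheme. A minimal sketch of how to reproduce them follows; it
# assumes the stdlib hashlib module, and pdk.cache may compute the ids
# differently internally. blob_ids_for is a hypothetical helper, not part
# of pdk.cache.
import hashlib

def blob_ids_for(data):
    # Return ('sha-1:<hex>', 'md5:<hex>') blob ids for a string of bytes.
    return ('sha-1:' + hashlib.sha1(data).hexdigest(),
            'md5:' + hashlib.md5(data).hexdigest())

assert blob_ids_for('hello') == (
    'sha-1:aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d',
    'md5:5d41402abc4b2a76b9719d911017c592')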
def test_cache_files(self):
    # This is being invalidated by changes in cache layout
    # Open a local file
    open('hi.txt', 'w').write('hello')
    # Copy it into the cache
    cache = pdk.cache.Cache(os.path.join(self.work_dir, 'cache'))
    cache.import_file(FileLocator('', 'hi.txt', None, None, None),
                      NullMassProgress())
    expected_blob_id = 'sha-1:aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
    cache_file = cache.file_path(expected_blob_id)
    assert os.path.exists(cache_file), cache_file + " expected"
    assert expected_blob_id in cache
    self.assert_equal(os.path.abspath('cache/md5/th/md5:this-one-md5'),
                      os.path.abspath(cache.file_path('md5:this-one-md5')))
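# The final assertion in test_cache_files pins down the on-disk layout the
# cache is expected to use: a blob id such as 'md5:this-one-md5' should land
# at '<cache dir>/<scheme>/<first two digest chars>/<blob id>'. A minimal,
# hypothetical sketch of that mapping follows; guess_cache_path is not part
# of pdk.cache, and the real file_path implementation may differ.
import os

def guess_cache_path(cache_dir, blob_id):
    # Split 'md5:this-one-md5' into scheme 'md5' and digest 'this-one-md5',
    # then bucket by the first two characters of the digest.
    scheme, digest = blob_id.split(':', 1)
    return os.path.join(cache_dir, scheme, digest[:2], blob_id)

assert (guess_cache_path('cache', 'md5:this-one-md5')
        == 'cache/md5/th/md5:this-one-md5')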