def test_builder_checkup(self):
    """Checkup passes on a complete shard set and fails once a shard is gone."""
    # Build the full shard set for this address.
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
    built = bucket.build(self.store_path)
    # Every generated file is present, so checkup succeeds.
    self.assertTrue(bucket.checkup(self.store_path))
    # Delete one shard chosen at random.
    victim = random.choice(list(built.keys()))
    os.remove(os.path.join(self.store_path, victim))
    # With a shard missing, checkup must now fail.
    self.assertFalse(bucket.checkup(self.store_path))
def test_builder_checkup(self):
    """Verify checkup() detects a missing shard (non-debug builder)."""
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
    shard_map = bucket.build(self.store_path)
    # Nothing removed yet: checkup should report success.
    self.assertTrue(bucket.checkup(self.store_path))
    # Pick an arbitrary shard and delete it from the store.
    seed = random.choice(list(shard_map.keys()))
    os.remove(os.path.join(self.store_path, seed))
    # The store is now incomplete: checkup should report failure.
    self.assertFalse(bucket.checkup(self.store_path))
def test_builder_skips_existing(self):
    """A second build regenerates only the missing shard, skipping the rest."""
    # Build the full shard set.
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
    generated = bucket.build(self.store_path)
    checkpoint = time.time()
    # Delete one shard at random.
    missing = random.choice(list(generated.keys()))
    os.remove(os.path.join(self.store_path, missing))
    # Rebuild: only the deleted shard should be written again.
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
    generated = bucket.build(self.store_path)
    # The recreated shard must be newer than the checkpoint; all the
    # untouched shards must predate it.
    for seed in generated:
        mtime = os.path.getmtime(os.path.join(self.store_path, seed))
        if seed == missing:
            self.assertTrue(mtime > checkpoint)
        else:
            self.assertTrue(mtime < checkpoint)
    # The store is complete again.
    self.assertTrue(bucket.checkup(self.store_path))
def test_builder_skips_existing(self):
    """Rebuilding an almost-complete store only rewrites the missing shard."""
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
    shard_map = bucket.build(self.store_path, False, False)
    marker = time.time()
    # Remove a single randomly chosen shard file.
    deleted = random.choice(list(shard_map.keys()))
    os.remove(os.path.join(self.store_path, deleted))
    # Run the build again; existing shards should be left alone.
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
    shard_map = bucket.build(self.store_path, False, False)
    # Compare modification times against the marker taken before deletion:
    # only the regenerated shard should be newer.
    for seed in shard_map:
        shard_path = os.path.join(self.store_path, seed)
        if seed == deleted:
            self.assertTrue(os.path.getmtime(shard_path) > marker)
        else:
            self.assertTrue(os.path.getmtime(shard_path) < marker)
    # All shards present again.
    self.assertTrue(bucket.checkup(self.store_path))
def test_build_rebuild(self):
    """build(rebuild=True) restores a deleted shard."""
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
    bucket.build(self.store_path)
    # Delete a known shard; checkup must then fail.
    missing = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
    os.remove(os.path.join(self.store_path, missing))
    self.assertFalse(bucket.checkup(self.store_path))
    # A rebuild should regenerate the missing file.
    bucket.build(self.store_path, rebuild=True)
    self.assertTrue(bucket.checkup(self.store_path))
def test_build_rebuild(self):
    """A rebuild pass recreates a removed shard (debug builder)."""
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
    bucket.build(self.store_path)
    # Remove one specific shard from the store.
    shard_name = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
    os.remove(os.path.join(self.store_path, shard_name))
    # Checkup fails while the shard is missing...
    self.assertFalse(bucket.checkup(self.store_path))
    bucket.build(self.store_path, rebuild=True)
    # ...and passes again after the rebuild.
    self.assertTrue(bucket.checkup(self.store_path))
def test_use_folder_tree_clean(self):
    """clean() removes every file from a folder-tree store.

    Bug fix: the original called ``os.walk(path, callback, None)``, which is
    the removed Python 2 ``os.path.walk`` callback API. In Python 3,
    ``os.walk`` returns a generator and the second argument is ``topdown`` —
    the callback was never invoked, so the assertion never ran. Iterating the
    generator makes the check actually execute for every directory.
    """
    bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
                     use_folder_tree=True)
    bucket.build(self.store_path)
    self.assertTrue(bucket.checkup(self.store_path))
    bucket.clean(self.store_path)
    # After clean() no directory in the tree may contain any files.
    for _dirpath, _dirnames, filenames in os.walk(self.store_path):
        self.assertTrue(len(filenames) == 0)
def test_use_folder_tree_clean(self):
    """clean() empties a folder-tree store built with a min-free-size limit.

    Bug fix: ``os.walk(path, callback, None)`` used the removed Python 2
    ``os.path.walk`` callback signature; in Python 3 ``os.walk`` is a
    generator (second argument is ``topdown``), so the callback — and its
    assertion — never ran. The walk is now iterated so every directory is
    actually checked.
    """
    bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
                     my_min_free_size, use_folder_tree=True)
    bucket.build(self.store_path)
    self.assertTrue(bucket.checkup(self.store_path))
    bucket.clean(self.store_path)
    # After clean() no directory in the tree may contain any files.
    for _dirpath, _dirnames, filenames in os.walk(self.store_path):
        self.assertTrue(len(filenames) == 0)
def test_build_repair(self):
    """build(repair=True) restores both deleted and corrupted shards."""
    bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
                     my_min_free_size)
    bucket.build(self.store_path)
    # Delete a known shard; checkup must fail until it is repaired.
    shard = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
    shard_path = os.path.join(self.store_path, shard)
    os.remove(shard_path)
    self.assertFalse(bucket.checkup(self.store_path))
    # Repair regenerates the missing shard.
    bucket.build(self.store_path, repair=True)
    self.assertTrue(bucket.checkup(self.store_path))
    # Now corrupt the same shard by appending garbage.
    original_digest = partialhash.compute(shard_path)
    with open(shard_path, "a") as handle:
        handle.write("bad data is bad\n")
    # The hash must change after corruption.
    self.assertNotEqual(original_digest, partialhash.compute(shard_path))
    # A plain build must leave the corrupted file untouched...
    bucket.build(self.store_path)
    self.assertNotEqual(original_digest, partialhash.compute(shard_path))
    # ...but a repair build must restore its original contents.
    bucket.build(self.store_path, repair=True)
    self.assertEqual(original_digest, partialhash.compute(shard_path))