def test_unify_with_exclusion():
    path = tests_data_path / 'Folder0'

    # run
    listing0_no3, tree0_no3, forbidden0_no3 = aw.walk(path,
                                                      exclusion={'Folder3'})
    listing3, tree3, forbidden3 = aw.walk(path / 'Folder3')
    listing_uni, tree_uni, forbid_uni = aw.unify(
        [listing0_no3, listing3],
        [tree0_no3, tree3],
        [forbidden0_no3, forbidden3])
    listing0_full, tree0_full, forbidden0_full = aw.walk(path)

    # remove the root folder record, as the run with exclusion does not give
    # the full size and hash (normally the unification is on separate folders)
    listing_uni.pop(('7e472b2b54ba97314c63988db267d125', 'DIR', 2698920))
    listing0_full.pop(('4f8c48630a797715e8b86466e0218aa1', 'DIR', 3598557))
    tree_uni = {pointer: content
                for pointer, content in tree_uni.items()
                if pointer[at.PATH] != tests_data_path / 'Folder0'}
    tree0_full = {pointer: content
                  for pointer, content in tree0_full.items()
                  if pointer[at.PATH] != tests_data_path / 'Folder0'}

    # verify
    assert listing_uni == listing0_full
    assert tree_uni == tree0_full
    assert forbid_uni == forbidden0_full
def _walk_with_progressbar(path, exclusion=None):
    # first pass: explore without hashing, only to measure the total size
    pbar_nb_files = tqdm.tqdm(total=1, desc='Exploring', unit=' files',
                              unit_scale=False)
    listing_nohash, tree_nohash, forbidden_nohash = aw.walk(
        path, exclusion=exclusion, should_hash=False, pbar=pbar_nb_files)
    path_size = tree_nohash[path][2]
    pbar_nb_files.close()

    # second pass: hash everything, with a progress bar scaled on total size
    pbar_size = tqdm.tqdm(total=path_size, desc='Indexing ', unit='B',
                          unit_scale=True, unit_divisor=1024)
    listing, tree, forbidden = aw.walk(path, exclusion=exclusion,
                                       should_hash=True, pbar=pbar_size)
    pbar_size.close()

    return listing, tree, forbidden
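# A minimal usage sketch for _walk_with_progressbar; the target path and
# exclusion below are hypothetical, not part of the test data:
#
#     listing, tree, forbidden = _walk_with_progressbar(
#         pathlib.Path('/some/big/folder'), exclusion={'.git'})
#
# The first walk only counts files (should_hash=False) to obtain the total
# size, so that the second, hashing walk can report its progress in bytes.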
def test_missing_not_fully_included():
    path = tests_data_path / 'Folder0'

    # run
    listing8, tree8, forbidden8 = aw.walk(tests_data_path / 'Folder8')
    listing0, tree0, forbidden0 = aw.walk(path)
    missing_listing = aw.get_missing(listing8, listing0)

    # for logging purposes only
    if debug:
        asd.save_json_index(path, missing_listing,
                            start_path=tests_data_path,
                            prefix='missing_not_fully_included_')

    # load expected
    expected_missing_listing = asd.load_json_listing(
        path / '.alfeios_expected' / 'listing_missing_from_Folder8.json',
        start_path=tests_data_path)

    # reset mtime everywhere, as it is updated by the test run itself
    missing_listing = reset_listing_mtime(missing_listing)
    expected_missing_listing = reset_listing_mtime(expected_missing_listing)

    # verify
    assert missing_listing == expected_missing_listing
def test_unify():
    path0 = tests_data_path / 'Folder0'
    path8 = tests_data_path / 'Folder8'

    # run
    listing0, tree0, forbidden0 = aw.walk(path0)
    listing8, tree8, forbidden8 = aw.walk(path8)
    listing, tree, forbidden = aw.unify([listing0, listing8],
                                        [tree0, tree8],
                                        [forbidden0, forbidden8])

    # load expected
    expected_listing = asd.load_json_listing(
        tests_data_path / '.alfeios_expected' / 'listing_0_8.json',
        start_path=tests_data_path)
    expected_tree = asd.load_json_tree(
        tests_data_path / '.alfeios_expected' / 'tree_0_8.json',
        start_path=tests_data_path)

    # reset mtime everywhere, as it is updated by the test run itself
    listing = reset_listing_mtime(listing)
    expected_listing = reset_listing_mtime(expected_listing)
    tree = reset_tree_mtime(tree)
    expected_tree = reset_tree_mtime(expected_tree)

    # verify
    assert listing == expected_listing
    assert tree == expected_tree
    assert forbidden == {}
def test_walk(folder, name):
    path = tests_data_path / folder

    # run
    listing, tree, forbidden = aw.walk(path)

    # for logging purposes only
    if debug:
        asd.save_json_index(path, listing, tree, forbidden,
                            start_path=tests_data_path)

    # load expected
    expected_listing = asd.load_json_listing(
        path / '.alfeios_expected' / 'listing.json',
        start_path=tests_data_path)
    expected_tree = asd.load_json_tree(
        path / '.alfeios_expected' / 'tree.json',
        start_path=tests_data_path)

    # reset mtime everywhere, as it is updated by the test run itself
    listing = reset_listing_mtime(listing)
    expected_listing = reset_listing_mtime(expected_listing)
    tree = reset_tree_mtime(tree)
    expected_tree = reset_tree_mtime(expected_tree)

    # verify
    assert listing == expected_listing
    assert tree == expected_tree
    assert forbidden == {}
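# Note: test_walk above takes (folder, name) arguments, so the full module
# presumably parametrizes it with something like the purely hypothetical
# decorator below; the real parameter sets are not shown in this excerpt:
#
#     @pytest.mark.parametrize('folder, name',
#                              [('Folder0', 'plain folder')])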
def test_duplicate_with_zip():
    # run
    listing, tree, forbidden = aw.walk(tests_data_path)
    duplicate_listing, size_gain = aw.get_duplicate(listing)

    # for logging purposes only
    if debug:
        asd.save_json_index(tests_data_path, duplicate_listing,
                            start_path=tests_data_path,
                            prefix='duplicate_with_zip_')

    # verify
    # here we only check that the root directory contents of the 4 folders
    # are equal. it should be enough thanks to the Merkle tree property of
    # the alfeios listing: equal directory content tuples imply equal
    # sub-trees
    duplicate_root_content = ('4f8c48630a797715e8b86466e0218aa1',
                              'DIR', 3598557)
    duplicate_root_pointers = duplicate_listing[duplicate_root_content]

    # remove mtime everywhere, as it is updated by the test run itself
    duplicate_root_directories = {path
                                  for path, mtime in duplicate_root_pointers}

    assert duplicate_root_directories == {tests_data_path / 'Folder0',
                                          tests_data_path / 'FolderZipFile',
                                          tests_data_path / 'FolderZipFolder',
                                          tests_data_path / 'FolderZipNested'}
def test_duplicate():
    path = tests_data_path / 'Folder0' / 'Folder3'

    # run
    listing, tree, forbidden = aw.walk(path)
    duplicate_listing, size_gain = aw.get_duplicate(listing)

    # for logging purposes only
    if debug:
        asd.save_json_index(path, duplicate_listing,
                            start_path=tests_data_path,
                            prefix='duplicate_')

    # load expected
    expected_duplicate_listing = asd.load_json_listing(
        path / '.alfeios_expected' / 'duplicate_listing.json',
        start_path=tests_data_path)

    # reset mtime everywhere, as it is updated by the test run itself
    duplicate_listing = reset_listing_mtime(duplicate_listing)
    expected_duplicate_listing = reset_listing_mtime(
        expected_duplicate_listing)

    # verify
    assert duplicate_listing == expected_duplicate_listing
    assert size_gain == 367645
def test_walk_with_exclusions():
    path = tests_data_path / 'Folder0'
    exclusion = {'Folder3', 'Folder4_1', 'file3.txt', 'groundhog.png'}

    # run
    listing, tree, forbidden = aw.walk(path, exclusion=exclusion)

    # for logging purposes only
    if debug:
        asd.save_json_index(path, listing, tree, forbidden,
                            start_path=tests_data_path,
                            prefix='with_exclusions_')

    # load expected
    expected_listing = asd.load_json_listing(
        path / '.alfeios_expected' / 'listing_with_exclusions.json',
        start_path=tests_data_path)
    expected_tree = asd.load_json_tree(
        path / '.alfeios_expected' / 'tree_with_exclusions.json',
        start_path=tests_data_path)

    # reset mtime everywhere, as it is updated by the test run itself
    listing = reset_listing_mtime(listing)
    expected_listing = reset_listing_mtime(expected_listing)
    tree = reset_tree_mtime(tree)
    expected_tree = reset_tree_mtime(expected_tree)

    # verify
    assert listing == expected_listing
    assert tree == expected_tree
    assert forbidden == {}
def test_missing_fully_included():
    path = tests_data_path / 'Folder0'

    # run
    listing3, tree3, forbidden3 = aw.walk(path / 'Folder3')
    listing0, tree0, forbidden0 = aw.walk(path)
    missing_listing = aw.get_missing(listing3, listing0)

    # for logging purposes only
    if debug:
        asd.save_json_index(path, missing_listing,
                            start_path=tests_data_path,
                            prefix='missing_fully_included_')

    # verify
    assert missing_listing == {}
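# ---------------------------------------------------------------------------
# Hedged sketch of the module-level scaffolding assumed by the tests above.
# The names reset_listing_mtime / reset_tree_mtime, tests_data_path and debug
# are used throughout this section but defined elsewhere in the real module;
# the bodies below are only a guess based on how they are called, assuming
# that a listing maps (hash, type, size) content tuples to sets of
# (path, mtime) pointers and that a tree maps (path, mtime) pointers to
# content tuples. They are kept commented out so as not to shadow the real
# definitions:
#
#     debug = False
#     tests_data_path = pathlib.Path(__file__).parent / 'data'
#
#     def reset_listing_mtime(listing):
#         # zero out mtime in every pointer so listings compare equal
#         # across test runs
#         return {content: {(path, 0) for path, mtime in pointers}
#                 for content, pointers in listing.items()}
#
#     def reset_tree_mtime(tree):
#         # zero out mtime in every pointer key so trees compare equal
#         # across test runs
#         return {(path, 0): content
#                 for (path, mtime), content in tree.items()}
# ---------------------------------------------------------------------------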