def test_load_index_with_corrupted_index(self):
    test_file = self.get_temp_file('test')
    with open(test_file, 'wb') as tf:
        # write junk bytes: this is not a valid pickled/serialized index
        tf.write(b'some junk')
    try:
        load_index(test_file)
        self.fail('No exception raised for corrupted index file.')
    except Exception as ex:
        assert 'Failed to load license cache' in str(ex)
def test_build_index(self):
    # note: this is a rather complex test because caching involves some globals
    cache_dir = self.get_temp_dir('index_cache')
    lock_file, checksum_file, cache_file = get_license_cache_paths(cache_dir=cache_dir)

    tree_base_dir = self.get_temp_dir('src_dir')
    licenses_data_dir = self.get_test_loc('cache/data/licenses', copy=True)
    rules_data_dir = self.get_test_loc('cache/data/rules', copy=True)

    # now add a file in the mock source tree
    new_file = os.path.join(tree_base_dir, 'some.py')
    with open(new_file, 'wb') as nf:
        nf.write(b'something')

    timeout = 10

    assert not os.path.exists(checksum_file)
    assert not os.path.exists(cache_file)
    assert not os.path.exists(lock_file)

    # when a new index is built, new index files are created
    check_consistency = True
    cache.get_cached_index(cache_dir, check_consistency, timeout,
                           tree_base_dir, licenses_data_dir, rules_data_dir)
    assert os.path.exists(checksum_file)
    assert os.path.exists(cache_file)
    assert not os.path.exists(lock_file)

    # when nothing changed, a new index file is not created
    tree_before = open(checksum_file).read()
    idx_checksum_before = hash.sha1(cache_file)
    cache.get_cached_index(cache_dir, check_consistency, timeout,
                           tree_base_dir, licenses_data_dir, rules_data_dir)
    assert tree_before == open(checksum_file).read()
    assert idx_checksum_before == hash.sha1(cache_file)

    # now add another file in the source tree
    new_file = os.path.join(tree_base_dir, 'some file')
    with open(new_file, 'wb') as nf:
        nf.write(b'something')

    # when check_consistency is False, the index is not rebuilt when
    # new files are added
    check_consistency = False
    cache.get_cached_index(cache_dir, check_consistency, timeout,
                           tree_base_dir, licenses_data_dir, rules_data_dir)
    assert tree_before == open(checksum_file).read()
    assert idx_checksum_before == hash.sha1(cache_file)

    # when check_consistency is True, the index is rebuilt when new
    # files are added
    check_consistency = True
    cache.get_cached_index(cache_dir, check_consistency, timeout,
                           tree_base_dir, licenses_data_dir, rules_data_dir)
    assert tree_before != open(checksum_file).read()

    # now add an ignored file in the source tree
    tree_before = open(checksum_file).read()
    idx_checksum_before = hash.sha1(cache_file)
    new_file = os.path.join(tree_base_dir, 'some file.pyc')
    with open(new_file, 'wb') as nf:
        nf.write(b'something')

    # when check_consistency is True, the index is not rebuilt when the
    # newly added files are ignored
    check_consistency = True
    cache.get_cached_index(cache_dir, check_consistency, timeout,
                           tree_base_dir, licenses_data_dir, rules_data_dir)
    assert tree_before == open(checksum_file).read()
    assert idx_checksum_before == hash.sha1(cache_file)

    # if the tree checksum file is deleted, the index is rebuilt
    fileutils.delete(checksum_file)
    idx_checksum_before = hash.sha1(cache_file)
    check_consistency = False
    cache.get_cached_index(cache_dir, check_consistency, timeout,
                           tree_base_dir, licenses_data_dir, rules_data_dir)
    assert tree_before == open(checksum_file).read()

    # if the index cache file is deleted, the index is rebuilt
    fileutils.delete(cache_file)
    check_consistency = False
    idx1 = cache.get_cached_index(cache_dir, check_consistency, timeout,
                                  tree_base_dir, licenses_data_dir, rules_data_dir)

    # load index, forced from file
    idx2 = cache.load_index(cache_file)
    assert idx1.to_dict(True) == idx2.to_dict(True)

    # reset global caches
    cache._LICENSE_SYMBOLS_BY_SPDX_KEY = {}
    cache._LICENSES_BY_KEY_INDEX = None
    cache._UNKNOWN_SPDX_SYMBOL = None
    cache._LICENSES_BY_KEY = None