def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package replaces it if that's what is requested.

    CONDA_SEPARATE_FORMAT_CACHE=True makes the two package formats cache independently,
    so requesting the .conda format triggers a fresh fetch/extract even though the
    .tar.bz2 form is already unpacked.
    """
    with env_vars({'CONDA_SEPARATE_FORMAT_CACHE': True},
                  stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with make_temp_package_cache() as pkgs_dir:
            # Cache the .tar.bz2 file in the package cache and extract it
            pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            pfe.prepare()
            assert len(pfe.cache_actions) == 1
            assert len(pfe.extract_actions) == 1
            cache_action = pfe.cache_actions[0]
            # Fixed misspelled local name 'extact_action' -> 'extract_action'.
            extract_action = pfe.extract_actions[0]
            assert basename(cache_action.target_full_path) == zlib_tar_bz2_fn
            assert cache_action.target_full_path == extract_action.source_full_path
            assert basename(extract_action.target_full_path) == zlib_base_fn

            # Go ahead with executing download and extract now
            pfe.execute()

            assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
            assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

            # Ensure second download/extract is a no-op
            pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            pfe.prepare()
            assert len(pfe.cache_actions) == 0
            assert len(pfe.extract_actions) == 0

            # Now ensure download/extract for the complementary .conda package replaces the
            # extracted .tar.bz2
            pfe = ProgressiveFetchExtract((zlib_conda_prec,))
            pfe.prepare()
            assert len(pfe.cache_actions) == 1
            assert len(pfe.extract_actions) == 1
            cache_action = pfe.cache_actions[0]
            extract_action = pfe.extract_actions[0]
            assert basename(cache_action.target_full_path) == zlib_conda_fn
            assert cache_action.target_full_path == extract_action.source_full_path
            assert basename(extract_action.target_full_path) == zlib_base_fn

            pfe.execute()

            # The extracted package's record must now come from the .conda artifact.
            with open(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json")) as fh:
                repodata_record = json.load(fh)
            assert repodata_record["fn"] == zlib_conda_fn

            # Now check urls.txt to make sure extensions are included.
            urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
            assert urls_text[0] == zlib_tar_bz2_prec.url
            assert urls_text[1] == zlib_conda_prec.url
def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package replaces it if that's what is requested.

    CONDA_SEPARATE_FORMAT_CACHE=True makes the two package formats cache independently,
    so requesting the .conda format triggers a fresh fetch/extract even though the
    .tar.bz2 form is already unpacked.
    """
    with env_vars({'CONDA_SEPARATE_FORMAT_CACHE': True},
                  stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with make_temp_package_cache() as pkgs_dir:
            # Cache the .tar.bz2 file in the package cache and extract it
            pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            pfe.prepare()
            assert len(pfe.cache_actions) == 1
            assert len(pfe.extract_actions) == 1
            cache_action = pfe.cache_actions[0]
            # Fixed misspelled local name 'extact_action' -> 'extract_action'.
            extract_action = pfe.extract_actions[0]
            assert basename(cache_action.target_full_path) == zlib_tar_bz2_fn
            assert cache_action.target_full_path == extract_action.source_full_path
            assert basename(extract_action.target_full_path) == zlib_base_fn

            # Go ahead with executing download and extract now
            pfe.execute()

            assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
            assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

            # Ensure second download/extract is a no-op
            pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            pfe.prepare()
            assert len(pfe.cache_actions) == 0
            assert len(pfe.extract_actions) == 0

            # Now ensure download/extract for the complementary .conda package replaces the
            # extracted .tar.bz2
            pfe = ProgressiveFetchExtract((zlib_conda_prec,))
            pfe.prepare()
            assert len(pfe.cache_actions) == 1
            assert len(pfe.extract_actions) == 1
            cache_action = pfe.cache_actions[0]
            extract_action = pfe.extract_actions[0]
            assert basename(cache_action.target_full_path) == zlib_conda_fn
            assert cache_action.target_full_path == extract_action.source_full_path
            assert basename(extract_action.target_full_path) == zlib_base_fn

            pfe.execute()

            # The extracted package's record must now come from the .conda artifact.
            with open(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json")) as fh:
                repodata_record = json.load(fh)
            assert repodata_record["fn"] == zlib_conda_fn

            # Now check urls.txt to make sure extensions are included.
            urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
            assert urls_text[0] == zlib_tar_bz2_prec.url
            assert urls_text[1] == zlib_conda_prec.url
def test_tar_bz2_in_pkg_cache_used_instead_of_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package is not downloaded/extracted
    """
    with make_temp_package_cache() as pkgs_dir:
        # Cache the .tar.bz2 file in the package cache and extract it
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        pfe.prepare()
        assert len(pfe.cache_actions) == 1
        assert len(pfe.extract_actions) == 1
        cache_action = pfe.cache_actions[0]
        # Fixed misspelled local name 'extact_action' -> 'extract_action'.
        extract_action = pfe.extract_actions[0]
        assert basename(cache_action.target_full_path) == zlib_tar_bz2_fn
        assert cache_action.target_full_path == extract_action.source_full_path
        assert basename(extract_action.target_full_path) == zlib_base_fn

        # Go ahead with executing download and extract now
        pfe.execute()

        assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
        assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # Ensure second download/extract is a no-op
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0

        # Now ensure download/extract for the complementary .conda package uses the cache
        pfe = ProgressiveFetchExtract((zlib_conda_prec,))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0

        # Now check urls.txt to make sure extensions are included.
        urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
        assert urls_text[0] == zlib_tar_bz2_prec.url
def test_tar_bz2_in_pkg_cache_used_instead_of_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package is not downloaded/extracted
    """
    with make_temp_package_cache() as pkgs_dir:
        # Cache the .tar.bz2 file in the package cache and extract it
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        pfe.prepare()
        assert len(pfe.cache_actions) == 1
        assert len(pfe.extract_actions) == 1
        cache_action = pfe.cache_actions[0]
        # Fixed misspelled local name 'extact_action' -> 'extract_action'.
        extract_action = pfe.extract_actions[0]
        assert basename(cache_action.target_full_path) == zlib_tar_bz2_fn
        assert cache_action.target_full_path == extract_action.source_full_path
        assert basename(extract_action.target_full_path) == zlib_base_fn

        # Go ahead with executing download and extract now
        pfe.execute()

        assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
        assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # Ensure second download/extract is a no-op
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0

        # Now ensure download/extract for the complementary .conda package uses the cache
        pfe = ProgressiveFetchExtract((zlib_conda_prec,))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0

        # Now check urls.txt to make sure extensions are included.
        urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
        assert urls_text[0] == zlib_tar_bz2_prec.url
def test_instantiating_package_cache_when_both_tar_bz2_and_conda_exist_read_only():
    """
    If both .tar.bz2 and .conda packages exist in a read-only package cache, but neither is
    unpacked, the .conda package should be preferred and pcrec loaded from that package.
    """
    with make_temp_package_cache() as pkgs_dir:
        # Instantiating the cache once creates its magic file.
        PackageCacheData(pkgs_dir)

        # Drop both package formats into the cache, unextracted: first the
        # .tar.bz2, then the .conda.
        for pkg_fn in (zlib_tar_bz2_fn, zlib_conda_fn):
            fetch = CacheUrlAction(
                "%s/%s/%s" % (CONDA_PKG_REPO, subdir, pkg_fn),
                pkgs_dir,
                pkg_fn,
            )
            fetch.verify()
            fetch.execute()
            fetch.cleanup()

        # Make the cache read-only and force a fresh PackageCacheData instance.
        make_read_only(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE))
        PackageCacheData._cache_.clear()

        pcd = PackageCacheData(pkgs_dir)
        pcrecs = tuple(pcd.iter_records())
        assert len(pcrecs) == 1
        pcrec = pcrecs[0]

        # no repodata_record.json file should be created
        assert not isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # The single record is loaded from the preferred .conda artifact.
        assert pcrec.fn == zlib_conda_fn
        assert pcrec.md5 == "edad165fc3d25636d4f0a61c42873fbc"
        assert pcrec.size == 112305

        cache_contents = listdir(pkgs_dir)
        assert zlib_base_fn not in cache_contents
        assert zlib_tar_bz2_fn in cache_contents
        assert zlib_conda_fn in cache_contents
def test_instantiating_package_cache_when_both_tar_bz2_and_conda_exist_read_only():
    """
    If both .tar.bz2 and .conda packages exist in a read-only package cache, but neither is
    unpacked, the .conda package should be preferred and pcrec loaded from that package.
    """
    with make_temp_package_cache() as pkgs_dir:
        # Instantiating the cache once creates its magic file.
        PackageCacheData(pkgs_dir)

        # Fetch the .tar.bz2 artifact into the cache without extracting it.
        tar_bz2_fetch = CacheUrlAction(
            "%s/%s/%s" % (CONDA_PKG_REPO, subdir, zlib_tar_bz2_fn),
            pkgs_dir,
            zlib_tar_bz2_fn,
        )
        tar_bz2_fetch.verify()
        tar_bz2_fetch.execute()
        tar_bz2_fetch.cleanup()

        # Fetch the .conda artifact the same way.
        conda_fetch = CacheUrlAction(
            "%s/%s/%s" % (CONDA_PKG_REPO, subdir, zlib_conda_fn),
            pkgs_dir,
            zlib_conda_fn,
        )
        conda_fetch.verify()
        conda_fetch.execute()
        conda_fetch.cleanup()

        # Make the cache read-only and force a fresh PackageCacheData instance.
        make_read_only(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE))
        PackageCacheData._cache_.clear()

        pcrecs = tuple(PackageCacheData(pkgs_dir).iter_records())
        assert len(pcrecs) == 1
        pcrec = pcrecs[0]

        # no repodata_record.json file should be created
        assert not isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # The single record is loaded from the preferred .conda artifact.
        assert pcrec.fn == zlib_conda_fn
        assert pcrec.md5 == "edad165fc3d25636d4f0a61c42873fbc"
        assert pcrec.size == 112305

        cache_contents = listdir(pkgs_dir)
        assert zlib_base_fn not in cache_contents
        assert zlib_tar_bz2_fn in cache_contents
        assert zlib_conda_fn in cache_contents