def test_register_unregister_location_env(self):
    """Round-trip an environment prefix through register_env/unregister_env
    via the user environments.txt file; both operations must be idempotent."""
    envs_txt = get_user_environments_txt_file()
    if not os.path.exists(envs_txt) or envs_txt == os.devnull:
        pytest.skip('user environments.txt file {} does not exist'.format(envs_txt))

    env_prefix = join(self.prefix, 'gascon')
    touch(join(env_prefix, PREFIX_MAGIC_FILE), mkdir=True)
    assert env_prefix not in list_all_known_prefixes()

    touch(envs_txt, mkdir=True, sudo_safe=True)
    register_env(env_prefix)
    assert env_prefix in yield_lines(envs_txt)
    assert sum(1 for line in yield_lines(envs_txt)
               if paths_equal(env_prefix, line)) == 1

    # Registering a second time must not create a duplicate entry.
    register_env(env_prefix)
    assert sum(1 for line in yield_lines(envs_txt) if line == env_prefix) == 1

    unregister_env(env_prefix)
    assert env_prefix not in list_all_known_prefixes()
    # Unregistering an already-removed prefix is a no-op.
    unregister_env(env_prefix)
    assert env_prefix not in list_all_known_prefixes()
def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package replaces it if that's what is requested.
    """
    with env_vars({'CONDA_SEPARATE_FORMAT_CACHE': True},
                  stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with make_temp_package_cache() as pkgs_dir:
            # Cache the .tar.bz2 file in the package cache and extract it
            fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            fetcher.prepare()
            assert len(fetcher.cache_actions) == 1
            assert len(fetcher.extract_actions) == 1
            cache_act = fetcher.cache_actions[0]
            extract_act = fetcher.extract_actions[0]
            assert basename(cache_act.target_full_path) == zlib_tar_bz2_fn
            assert cache_act.target_full_path == extract_act.source_full_path
            assert basename(extract_act.target_full_path) == zlib_base_fn

            # Go ahead with executing download and extract now
            fetcher.execute()
            assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
            assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

            # Ensure second download/extract is a no-op
            fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            fetcher.prepare()
            assert not fetcher.cache_actions
            assert not fetcher.extract_actions

            # Now ensure download/extract for the complementary .conda package
            # replaces the extracted .tar.bz2
            fetcher = ProgressiveFetchExtract((zlib_conda_prec,))
            fetcher.prepare()
            assert len(fetcher.cache_actions) == 1
            assert len(fetcher.extract_actions) == 1
            cache_act = fetcher.cache_actions[0]
            extract_act = fetcher.extract_actions[0]
            assert basename(cache_act.target_full_path) == zlib_conda_fn
            assert cache_act.target_full_path == extract_act.source_full_path
            assert basename(extract_act.target_full_path) == zlib_base_fn

            fetcher.execute()
            record_path = join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json")
            with open(record_path) as fh:
                repodata_record = json.load(fh)
            assert repodata_record["fn"] == zlib_conda_fn

            # Now check urls.txt to make sure extensions are included.
            urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
            assert urls_text[0] == zlib_tar_bz2_prec.url
            assert urls_text[1] == zlib_conda_prec.url
def test_register_unregister_location_env(self):
    """Round-trip an environment prefix through register_env/unregister_env
    via USER_ENVIRONMENTS_TXT_FILE; both operations must be idempotent."""
    env_prefix = join(self.prefix, 'gascon')
    touch(join(env_prefix, PREFIX_MAGIC_FILE), mkdir=True)
    assert env_prefix not in list_all_known_prefixes()

    touch(USER_ENVIRONMENTS_TXT_FILE, mkdir=True, sudo_safe=True)
    register_env(env_prefix)
    assert env_prefix in yield_lines(USER_ENVIRONMENTS_TXT_FILE)
    assert sum(1 for line in yield_lines(USER_ENVIRONMENTS_TXT_FILE)
               if paths_equal(env_prefix, line)) == 1

    # Registering a second time must not create a duplicate entry.
    register_env(env_prefix)
    assert sum(1 for line in yield_lines(USER_ENVIRONMENTS_TXT_FILE)
               if line == env_prefix) == 1

    unregister_env(env_prefix)
    assert env_prefix not in list_all_known_prefixes()
    # Unregistering an already-removed prefix is a no-op.
    unregister_env(env_prefix)
    assert env_prefix not in list_all_known_prefixes()
def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package replaces it if that's what is requested.
    """
    with env_vars({'CONDA_SEPARATE_FORMAT_CACHE': True},
                  stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with make_temp_package_cache() as pkgs_dir:
            # Cache the .tar.bz2 file in the package cache and extract it
            fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            fetcher.prepare()
            assert len(fetcher.cache_actions) == 1
            assert len(fetcher.extract_actions) == 1
            cache_act = fetcher.cache_actions[0]
            extract_act = fetcher.extract_actions[0]
            assert basename(cache_act.target_full_path) == zlib_tar_bz2_fn
            assert cache_act.target_full_path == extract_act.source_full_path
            assert basename(extract_act.target_full_path) == zlib_base_fn

            # Go ahead with executing download and extract now
            fetcher.execute()
            assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
            assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

            # Ensure second download/extract is a no-op
            fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
            fetcher.prepare()
            assert not fetcher.cache_actions
            assert not fetcher.extract_actions

            # Now ensure download/extract for the complementary .conda package
            # replaces the extracted .tar.bz2
            fetcher = ProgressiveFetchExtract((zlib_conda_prec,))
            fetcher.prepare()
            assert len(fetcher.cache_actions) == 1
            assert len(fetcher.extract_actions) == 1
            cache_act = fetcher.cache_actions[0]
            extract_act = fetcher.extract_actions[0]
            assert basename(cache_act.target_full_path) == zlib_conda_fn
            assert cache_act.target_full_path == extract_act.source_full_path
            assert basename(extract_act.target_full_path) == zlib_base_fn

            fetcher.execute()
            record_path = join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json")
            with open(record_path) as fh:
                repodata_record = json.load(fh)
            assert repodata_record["fn"] == zlib_conda_fn

            # Now check urls.txt to make sure extensions are included.
            urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
            assert urls_text[0] == zlib_tar_bz2_prec.url
            assert urls_text[1] == zlib_conda_prec.url
def test_register_unregister_location_env(self):
    """Round-trip an environment prefix through register_env/unregister_env
    via the user environments.txt file; both operations must be idempotent."""
    envs_txt = get_user_environments_txt_file()
    if not os.path.exists(envs_txt) or envs_txt == os.devnull:
        pytest.skip('user environments.txt file {} does not exist'.format(envs_txt))

    env_prefix = join(self.prefix, 'gascon')
    touch(join(env_prefix, PREFIX_MAGIC_FILE), mkdir=True)
    assert env_prefix not in list_all_known_prefixes()

    touch(envs_txt, mkdir=True, sudo_safe=True)
    register_env(env_prefix)
    assert env_prefix in yield_lines(envs_txt)
    assert sum(1 for line in yield_lines(envs_txt)
               if paths_equal(env_prefix, line)) == 1

    # Registering a second time must not create a duplicate entry.
    register_env(env_prefix)
    assert sum(1 for line in yield_lines(envs_txt) if line == env_prefix) == 1

    unregister_env(env_prefix)
    assert env_prefix not in list_all_known_prefixes()
    # Unregistering an already-removed prefix is a no-op.
    unregister_env(env_prefix)
    assert env_prefix not in list_all_known_prefixes()
def test_tar_bz2_in_pkg_cache_used_instead_of_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package is not downloaded/extracted
    """
    with make_temp_package_cache() as pkgs_dir:
        # Cache the .tar.bz2 file in the package cache and extract it
        fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        fetcher.prepare()
        assert len(fetcher.cache_actions) == 1
        assert len(fetcher.extract_actions) == 1
        cache_act = fetcher.cache_actions[0]
        extract_act = fetcher.extract_actions[0]
        assert basename(cache_act.target_full_path) == zlib_tar_bz2_fn
        assert cache_act.target_full_path == extract_act.source_full_path
        assert basename(extract_act.target_full_path) == zlib_base_fn

        # Go ahead with executing download and extract now
        fetcher.execute()
        assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
        assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # Ensure second download/extract is a no-op
        fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        fetcher.prepare()
        assert not fetcher.cache_actions
        assert not fetcher.extract_actions

        # Now ensure download/extract for the complementary .conda package uses the cache
        fetcher = ProgressiveFetchExtract((zlib_conda_prec,))
        fetcher.prepare()
        assert not fetcher.cache_actions
        assert not fetcher.extract_actions

        # Now check urls.txt to make sure extensions are included.
        urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
        assert urls_text[0] == zlib_tar_bz2_prec.url
def test_tar_bz2_in_pkg_cache_used_instead_of_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package is not downloaded/extracted
    """
    with make_temp_package_cache() as pkgs_dir:
        # Cache the .tar.bz2 file in the package cache and extract it
        fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        fetcher.prepare()
        assert len(fetcher.cache_actions) == 1
        assert len(fetcher.extract_actions) == 1
        cache_act = fetcher.cache_actions[0]
        extract_act = fetcher.extract_actions[0]
        assert basename(cache_act.target_full_path) == zlib_tar_bz2_fn
        assert cache_act.target_full_path == extract_act.source_full_path
        assert basename(extract_act.target_full_path) == zlib_base_fn

        # Go ahead with executing download and extract now
        fetcher.execute()
        assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
        assert isfile(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # Ensure second download/extract is a no-op
        fetcher = ProgressiveFetchExtract((zlib_tar_bz2_prec,))
        fetcher.prepare()
        assert not fetcher.cache_actions
        assert not fetcher.extract_actions

        # Now ensure download/extract for the complementary .conda package uses the cache
        fetcher = ProgressiveFetchExtract((zlib_conda_prec,))
        fetcher.prepare()
        assert not fetcher.cache_actions
        assert not fetcher.extract_actions

        # Now check urls.txt to make sure extensions are included.
        urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
        assert urls_text[0] == zlib_tar_bz2_prec.url
def test_yield_lines(tmpdir):
    """yield_lines returns only the content lines of the fixture file.
    NOTE(review): 'line 3' is absent from the expected output — presumably
    _make_lines_file writes a blank/comment line there that yield_lines
    filters out; verify against the helper."""
    target_path = join(text_type(tmpdir), "testfile")
    _make_lines_file(target_path)
    assert list(yield_lines(target_path)) == ['line 1', 'line 2', 'line 4']