Example 1
def test_clean_and_packages(clear_cache):
    pkg = "bzip2"

    with make_temp_package_cache() as pkgs_dir:
        # pkg doesn't exist ahead of time
        assert_not_pkg(pkg, _get_pkgs(pkgs_dir))

        with make_temp_env(pkg) as prefix:
            # pkg exists
            assert_any_pkg(pkg, _get_pkgs(pkgs_dir))

            # the --json flag is a regression test for #5451
            stdout, _, _ = run_command(Commands.CLEAN, "", "--packages",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # pkg still exists since it's in use by the temp env
            assert_any_pkg(pkg, _get_pkgs(pkgs_dir))

            run_command(Commands.REMOVE, prefix, pkg, "--yes", "--json")
            stdout, _, _ = run_command(Commands.CLEAN, "", "--packages",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # pkg is removed
            assert_not_pkg(pkg, _get_pkgs(pkgs_dir))

        # pkg is still removed
        assert_not_pkg(pkg, _get_pkgs(pkgs_dir))
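
The _get_pkgs helper and the assert_any_pkg / assert_not_pkg assertions used above are not reproduced in this excerpt. A minimal sketch of how they might look is given below; the package-cache layout and the exact signatures are assumptions, not the test suite's actual definitions.

# Hypothetical sketch of the helpers assumed by test_clean_and_packages.
from os import listdir
from os.path import basename, isdir, join


def _get_pkgs(pkgs_dir):
    # extracted packages show up as subdirectories of the package cache
    return [
        join(pkgs_dir, name)
        for name in listdir(pkgs_dir)
        if isdir(join(pkgs_dir, name))
    ]


def assert_any_pkg(name, contents):
    # at least one entry belongs to the named package, e.g. "bzip2-1.0.8-..."
    assert any(basename(item).startswith(f"{name}-") for item in contents)


def assert_not_pkg(name, contents):
    # no entry belongs to the named package
    assert not any(basename(item).startswith(f"{name}-") for item in contents)
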
Example 2
def test_tar_bz2_in_cache_not_extracted():
    """
    Test that if a .tar.bz2 exists in the package cache (not extracted), and the complementary
    .conda package is requested, the .tar.bz2 package in the cache is used by default.
    """
    with make_temp_package_cache() as pkgs_dir:
        copy(join(CHANNEL_DIR, subdir, zlib_tar_bz2_fn),
             join(pkgs_dir, zlib_tar_bz2_fn))
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec, ))
        pfe.prepare()
        assert len(pfe.cache_actions) == 1
        assert len(pfe.extract_actions) == 1

        pfe.execute()

        pkgs_dir_files = listdir(pkgs_dir)
        assert zlib_base_fn in pkgs_dir_files
        assert zlib_tar_bz2_fn in pkgs_dir_files

        # Now ensure download/extract for the complementary .conda package uses the
        # extracted .tar.bz2
        pfe = ProgressiveFetchExtract((zlib_conda_prec, ))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0
Example 3
def test_clean_logfiles(clear_cache):
    """Logfiles are found in pkgs_dir/.logs.

    Since these log files are specific to the experimental libmamba release, we mock them here.
    """
    pkg = "bzip2"

    with make_temp_package_cache() as pkgs_dir:
        # logfiles don't exist ahead of time
        assert not _get_logfiles(pkgs_dir)

        with make_temp_env(pkg):
            # mimic logfiles being created
            logs = join(pkgs_dir, CONDA_LOGS_DIR)
            mkdir_p(logs)
            path = join(logs, f"{datetime.utcnow():%Y%m%d-%H%M%S-%f}.log")
            with open(path, "w"):
                pass

            # logfiles exist
            assert path in _get_logfiles(pkgs_dir)

            # the --json flag is a regression test for #5451
            stdout, _, _ = run_command(Commands.CLEAN, "", "--logfiles",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # logfiles removed
            assert not _get_logfiles(pkgs_dir)

        # logfiles still removed
        assert not _get_logfiles(pkgs_dir)
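
The _get_logfiles helper is likewise not shown. A plausible sketch, assuming CONDA_LOGS_DIR is the ".logs" directory named in the docstring:

# Hypothetical sketch of _get_logfiles; CONDA_LOGS_DIR is an assumption based on
# the docstring above.
from glob import glob
from os.path import join

CONDA_LOGS_DIR = ".logs"


def _get_logfiles(pkgs_dir):
    # every *.log file under pkgs_dir/.logs
    return glob(join(pkgs_dir, CONDA_LOGS_DIR, "*.log"))
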
Example 4
def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package replaces it if that's what is requested.
    """
    with env_vars({'CONDA_SEPARATE_FORMAT_CACHE': True},
                  stack_callback=conda_tests_ctxt_mgmt_def_pol):
        with make_temp_package_cache() as pkgs_dir:
            # Cache the .tar.bz2 file in the package cache and extract it
            pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec, ))
            pfe.prepare()
            assert len(pfe.cache_actions) == 1
            assert len(pfe.extract_actions) == 1
            cache_action = pfe.cache_actions[0]
            extract_action = pfe.extract_actions[0]
            assert basename(cache_action.target_full_path) == zlib_tar_bz2_fn
            assert cache_action.target_full_path == extract_action.source_full_path
            assert basename(extract_action.target_full_path) == zlib_base_fn

            # Go ahead with executing download and extract now
            pfe.execute()

            assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
            assert isfile(
                join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

            # Ensure second download/extract is a no-op
            pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec, ))
            pfe.prepare()
            assert len(pfe.cache_actions) == 0
            assert len(pfe.extract_actions) == 0

            # Now ensure download/extract for the complementary .conda package replaces the
            # extracted .tar.bz2
            pfe = ProgressiveFetchExtract((zlib_conda_prec, ))
            pfe.prepare()
            assert len(pfe.cache_actions) == 1
            assert len(pfe.extract_actions) == 1
            cache_action = pfe.cache_actions[0]
            extract_action = pfe.extract_actions[0]
            assert basename(cache_action.target_full_path) == zlib_conda_fn
            assert cache_action.target_full_path == extract_action.source_full_path
            assert basename(extract_action.target_full_path) == zlib_base_fn

            pfe.execute()

            with open(
                    join(pkgs_dir, zlib_base_fn, "info",
                         "repodata_record.json")) as fh:
                repodata_record = json.load(fh)
            assert repodata_record["fn"] == zlib_conda_fn

            # Now check urls.txt to make sure extensions are included.
            urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
            assert urls_text[0] == zlib_tar_bz2_prec.url
            assert urls_text[1] == zlib_conda_prec.url
Example 5
def test_instantiating_package_cache_when_both_tar_bz2_and_conda_exist_read_only():
    """
    If both .tar.bz2 and .conda packages exist in a read-only package cache, but neither is
    unpacked, the .conda package should be preferred and pcrec loaded from that package.
    """
    with make_temp_package_cache() as pkgs_dir:
        # instantiate to create magic file
        PackageCacheData(pkgs_dir)

        # copy .tar.bz2 to package cache
        cache_action = CacheUrlAction(
            "%s/%s/%s" % (CONDA_PKG_REPO, subdir, zlib_tar_bz2_fn),
            pkgs_dir,
            zlib_tar_bz2_fn,
        )
        cache_action.verify()
        cache_action.execute()
        cache_action.cleanup()

        # copy .conda to package cache
        cache_action = CacheUrlAction(
            "%s/%s/%s" % (CONDA_PKG_REPO, subdir, zlib_conda_fn),
            pkgs_dir,
            zlib_conda_fn,
        )
        cache_action.verify()
        cache_action.execute()
        cache_action.cleanup()

        make_read_only(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE))
        PackageCacheData._cache_.clear()

        pcd = PackageCacheData(pkgs_dir)
        pcrecs = tuple(pcd.iter_records())
        assert len(pcrecs) == 1
        pcrec = pcrecs[0]

        # no repodata_record.json file should be created
        assert not isfile(
            join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        assert pcrec.fn == zlib_conda_fn
        assert pcrec.md5 == "edad165fc3d25636d4f0a61c42873fbc"
        assert pcrec.size == 112305

        pkgs_dir_files = listdir(pkgs_dir)
        assert zlib_base_fn not in pkgs_dir_files
        assert zlib_tar_bz2_fn in pkgs_dir_files
        assert zlib_conda_fn in pkgs_dir_files
Example 6
def test_clean_all(clear_cache):
    pkg = "bzip2"

    with make_temp_package_cache() as pkgs_dir:
        # pkg, tarball, & index cache don't exist ahead of time
        pkgs, tars, cache = _get_all(pkgs_dir)
        assert_not_pkg(pkg, pkgs)
        assert_not_pkg(pkg, tars)
        assert not cache

        with make_temp_env(pkg) as prefix:
            # pkg, tarball, & index cache exist
            pkgs, tars, cache = _get_all(pkgs_dir)
            assert_any_pkg(pkg, pkgs)
            assert_any_pkg(pkg, tars)
            assert cache

            stdout, _, _ = run_command(Commands.CLEAN, "", "--all", "--yes",
                                       "--json")
            json_loads(stdout)  # assert valid json

            # pkg still exists since it's in use by the temp env
            # tarball is removed
            # index cache is cleared
            pkgs, tars, cache = _get_all(pkgs_dir)
            assert_any_pkg(pkg, pkgs)
            assert_not_pkg(pkg, tars)
            assert not cache

            run_command(Commands.REMOVE, prefix, pkg, "--yes", "--json")
            stdout, _, _ = run_command(Commands.CLEAN, "", "--packages",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # pkg is removed
            # tarball is still removed
            # index cache is still cleared
            pkgs, tars, cache = _get_all(pkgs_dir)
            assert_not_pkg(pkg, pkgs)
            assert_not_pkg(pkg, tars)
            assert not cache

        # pkg is still removed
        # tarball is still removed
        # index cache is still cleared
        pkgs, tars, cache = _get_all(pkgs_dir)
        assert_not_pkg(pkg, pkgs)
        assert_not_pkg(pkg, tars)
        assert not cache
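
Example 6 additionally relies on _get_tars, _get_index_cache, and _get_all, none of which appear in this excerpt. A minimal sketch, reusing the _get_pkgs sketch above and assuming the index cache lives in a "cache" subdirectory of the first writable package cache:

# Hypothetical sketch of the remaining helpers; the tarball extensions and the
# index cache location are assumptions, not the test suite's actual definitions.
from os import listdir
from os.path import isdir, join

from conda.core.package_cache_data import PackageCacheData

TARBALL_EXTENSIONS = (".tar.bz2", ".conda")  # assumed package extensions


def _get_tars(pkgs_dir):
    # cached package tarballs sitting directly in the package cache
    return [
        join(pkgs_dir, name)
        for name in listdir(pkgs_dir)
        if name.endswith(TARBALL_EXTENSIONS)
    ]


def _get_index_cache():
    # assumed location: a "cache" subdirectory of the first writable package cache
    cache_dir = join(PackageCacheData.first_writable().pkgs_dir, "cache")
    return listdir(cache_dir) if isdir(cache_dir) else []


def _get_all(pkgs_dir):
    return _get_pkgs(pkgs_dir), _get_tars(pkgs_dir), _get_index_cache()
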
Example 7
def test_clean_force_pkgs_dirs(clear_cache):
    pkg = "bzip2"

    with make_temp_package_cache() as pkgs_dir:
        # pkgs_dir is a directory
        assert isdir(pkgs_dir)

        with make_temp_env(pkg):
            stdout, _, _ = run_command(Commands.CLEAN, "", "--force-pkgs-dirs",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # pkgs_dir is removed
            assert not exists(pkgs_dir)

        # pkgs_dir is still removed
        assert not exists(pkgs_dir)
Example 8
def test_instantiating_package_cache_when_both_tar_bz2_and_conda_exist():
    """
    If both .tar.bz2 and .conda packages exist in a writable package cache, but neither is
    unpacked, the .conda package should be preferred and unpacked in place.
    """
    with make_temp_package_cache() as pkgs_dir:
        # copy .tar.bz2 to package cache
        cache_action = CacheUrlAction(
            "%s/%s/%s" % (CONDA_PKG_REPO, subdir, zlib_tar_bz2_fn),
            pkgs_dir,
            zlib_tar_bz2_fn,
        )
        cache_action.verify()
        cache_action.execute()
        cache_action.cleanup()

        # copy .conda to package cache
        cache_action = CacheUrlAction(
            "%s/%s/%s" % (CONDA_PKG_REPO, subdir, zlib_conda_fn),
            pkgs_dir,
            zlib_conda_fn,
        )
        cache_action.verify()
        cache_action.execute()
        cache_action.cleanup()

        PackageCacheData._cache_.clear()
        pcd = PackageCacheData(pkgs_dir)
        pcrecs = tuple(pcd.iter_records())
        assert len(pcrecs) == 1
        pcrec = pcrecs[0]

        # ensure the package was actually extracted by checking for repodata_record.json
        with open(join(pkgs_dir, zlib_base_fn, "info",
                       "repodata_record.json")) as fh:
            repodata_record = json.load(fh)

        assert pcrec.fn == zlib_conda_fn == repodata_record["fn"]
        assert pcrec.md5 == repodata_record["md5"]

        pkgs_dir_files = listdir(pkgs_dir)
        assert zlib_base_fn in pkgs_dir_files
        assert zlib_tar_bz2_fn in pkgs_dir_files
        assert zlib_conda_fn in pkgs_dir_files
Example 9
def test_clean_index_cache(clear_cache):
    pkg = "bzip2"

    with make_temp_package_cache():
        # index cache doesn't exist ahead of time
        assert not _get_index_cache()

        with make_temp_env(pkg):
            # index cache exists
            assert _get_index_cache()

            stdout, _, _ = run_command(Commands.CLEAN, "", "--index-cache",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # index cache is cleared
            assert not _get_index_cache()

        # index cache is still cleared
        assert not _get_index_cache()
Example 10
def test_clean_tarballs(clear_cache):
    pkg = "bzip2"

    with make_temp_package_cache() as pkgs_dir:
        # tarball doesn't exist ahead of time
        assert_not_pkg(pkg, _get_tars(pkgs_dir))

        with make_temp_env(pkg):
            # tarball exists
            assert_any_pkg(pkg, _get_tars(pkgs_dir))

            # the --json flag is a regression test for #5451
            stdout, _, _ = run_command(Commands.CLEAN, "", "--tarballs",
                                       "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # tarball is removed
            assert_not_pkg(pkg, _get_tars(pkgs_dir))

        # tarball is still removed
        assert_not_pkg(pkg, _get_tars(pkgs_dir))
Example 11
def test_tar_bz2_in_pkg_cache_used_instead_of_conda_pkg():
    """
    Test that if a .tar.bz2 package is downloaded and extracted in a package cache, the
    complementary .conda package is not downloaded or extracted.
    """
    with make_temp_package_cache() as pkgs_dir:
        # Cache the .tar.bz2 file in the package cache and extract it
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec, ))
        pfe.prepare()
        assert len(pfe.cache_actions) == 1
        assert len(pfe.extract_actions) == 1
        cache_action = pfe.cache_actions[0]
        extract_action = pfe.extract_actions[0]
        assert basename(cache_action.target_full_path) == zlib_tar_bz2_fn
        assert cache_action.target_full_path == extract_action.source_full_path
        assert basename(extract_action.target_full_path) == zlib_base_fn

        # Go ahead with executing download and extract now
        pfe.execute()

        assert isfile(join(pkgs_dir, zlib_tar_bz2_fn))
        assert isfile(
            join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json"))

        # Ensure second download/extract is a no-op
        pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec, ))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0

        # Now ensure download/extract for the complementary .conda package uses the cache
        pfe = ProgressiveFetchExtract((zlib_conda_prec, ))
        pfe.prepare()
        assert len(pfe.cache_actions) == 0
        assert len(pfe.extract_actions) == 0

        # Now check urls.txt to make sure extensions are included.
        urls_text = tuple(yield_lines(join(pkgs_dir, "urls.txt")))
        assert urls_text[0] == zlib_tar_bz2_prec.url
Example 12
def test_clean_tempfiles(clear_cache):
    """Tempfiles are either suffixed with .c~ or .trash.

    .c~ is used to indicate that conda is actively using that file. If the conda process is
    terminated unexpectedly these .c~ files may remain and hence can be cleaned up after the fact.

    .trash appears to be a legacy suffix that is no longer used by conda.

    Since the presence of .c~ and .trash files are dependent upon irregular termination we create
    our own temporary files to confirm they get cleaned up.
    """
    pkg = "bzip2"

    with make_temp_package_cache() as pkgs_dir:
        # tempfiles don't exist ahead of time
        assert not _get_tempfiles(pkgs_dir)

        with make_temp_env(pkg):
            # mimic tempfiles being created
            path = _get_tars(pkgs_dir)[0]  # grab any tarball
            for ext in CONDA_TEMP_EXTENSIONS:
                copy(path, f"{path}{ext}")

            # tempfiles exist
            assert len(_get_tempfiles(pkgs_dir)) == len(CONDA_TEMP_EXTENSIONS)

            # the --json flag is a regression test for #5451
            stdout, _, _ = run_command(Commands.CLEAN, "", "--tempfiles",
                                       pkgs_dir, "--yes", "--json")
            json_loads(stdout)  # assert valid json

            # tempfiles removed
            assert not _get_tempfiles(pkgs_dir)

        # tempfiles still removed
        assert not _get_tempfiles(pkgs_dir)
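
The _get_tempfiles helper and the CONDA_TEMP_EXTENSIONS constant are also not shown. A minimal sketch, assuming the extensions are exactly the ".c~" and ".trash" suffixes named in the docstring:

# Hypothetical sketch of _get_tempfiles; CONDA_TEMP_EXTENSIONS is an assumption
# based on the docstring above.
from os import listdir
from os.path import join

CONDA_TEMP_EXTENSIONS = (".c~", ".trash")


def _get_tempfiles(pkgs_dir):
    # any file in the package cache carrying a temporary-file suffix
    return [
        join(pkgs_dir, name)
        for name in listdir(pkgs_dir)
        if name.endswith(CONDA_TEMP_EXTENSIONS)
    ]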