Example #1
def rm_lock(locks, verbose=True):
    from ..install import rm_rf

    for path in locks:
        if verbose:
            print("removing: %s" % path)
        rm_rf(path)
Example #2
def rm_pkgs(args):
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    from os.path import join, isdir
    from os import lstat, walk, listdir
    from conda.install import rm_rf

    pkgs_dir = config.pkgs_dirs[0]
    print('Cache location: %s' % pkgs_dir)

    rmlist = []
    pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
        # Only include actual packages
        isdir(join(pkgs_dir, i, 'info'))]
    for pkg in pkgs:
        breakit = False
        for root, dir, files in walk(join(pkgs_dir, pkg)):
            if breakit:
                break
            for fn in files:
                try:
                    stat = lstat(join(root, fn))
                except OSError as e:
                    print(e)
                    continue
                if stat.st_nlink > 1:
                    # print('%s is installed: %s' % (pkg, join(root, fn)))
                    breakit = True
                    break
        else:
            rmlist.append(pkg)

    if not rmlist:
        print("There are no unused packages to remove")
        sys.exit(0)

    print("Will remove the following packages:")
    print()
    totalsize = 0
    maxlen = len(max(rmlist, key=lambda x: len(str(x))))
    fmt = "%-40s %10s"
    for pkg in rmlist:
        pkgsize = 0
        for root, dir, files in walk(join(pkgs_dir, pkg)):
            for fn in files:
                # We don't have to worry about counting things twice:  by
                # definition these files all have a link count of 1!
                size = lstat(join(root, fn)).st_size
                totalsize += size
                pkgsize += size
        print(fmt % (pkg, human_bytes(pkgsize)))
    print('-' * (maxlen + 2 + 10))
    print(fmt % ('Total:', human_bytes(totalsize)))
    print()

    common.confirm_yn(args)

    for pkg in rmlist:
        print("removing %s" % pkg)
        rm_rf(join(pkgs_dir, pkg))
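The cleanup above treats a package as still in use when any file inside it has a hard-link count above 1. A minimal, self-contained sketch of that check (the helper name is illustrative, not part of conda):

import os
from os.path import join

def package_in_use(pkg_dir):
    # A file with st_nlink > 1 is also hard-linked into some environment,
    # so the package that owns it should not be removed from the cache.
    for root, dirs, files in os.walk(pkg_dir):
        for fn in files:
            try:
                if os.lstat(join(root, fn)).st_nlink > 1:
                    return True
            except OSError:
                continue
    return False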
Example #3
def make_recipe(package, version, noarch_python=False):
    if version is None:
        release = client.package_releases(package)
        if len(release) > 0:
            version = release[0]
        else:
            raise RuntimeError("Empty releases for %s" % package)
    depends = get_all_dependencies(package, version)
    dirname = package.lower() + "-" + version
    if os.path.isdir(dirname):
        rm_rf(dirname)
    os.mkdir(dirname)
    direc = os.path.abspath(dirname)
    build = 'pip install %s==%s\n' % (package, version)
    # write build.sh file and bld.bat file
    filenames = ['build.sh', 'bld.bat']
    for name in filenames:
        with open(os.path.join(direc, name), 'w') as fid:
            fid.write(build)

    indent = '\n    - '
    d = {}
    d['packagename'] = package
    d['version'] = version
    if depends:
        d['depends'] = indent.join([''] + depends)
    else:
        d['depends'] = ''

    data = client.release_data(package, version)
    if not data:
        raise RuntimeError("Cannot get data for %s-%s" % (package, version))

    license_classifier = "License :: OSI Approved ::"
    if 'classifiers' in data:
        # Strip the classifier prefix (str.lstrip removes characters, not a prefix)
        licenses = [classifier[len(license_classifier):].strip() for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
    else:
        licenses = []

    if not licenses:
        license = data.get('license', 'UNKNOWN') or 'UNKNOWN'
    else:
        license = ' or '.join(licenses)

    d['homeurl'] = data['home_page']
    d['license'] = license
    d['summary'] = repr(data['summary'])

    if noarch_python:
        d['build_comment'] = ''
        d['noarch_python_comment'] = ''
    else:
        d['build_comment'] = '# '
        d['noarch_python_comment'] = '# '

    with open(os.path.join(direc, 'meta.yaml'), 'w') as fid:
        fid.write(meta_template.format(**d))

    return direc, depends
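The meta.yaml written above comes from filling a module-level meta_template with the dict d. A hypothetical, heavily simplified stand-in for that template, matching the placeholders used here (the project's real template is richer):

# Hypothetical stand-in for the project's meta_template.
meta_template = """\
package:
  name: {packagename}
  version: "{version}"

build:
  {noarch_python_comment}noarch: python

requirements:
  run:{depends}

about:
  home: {homeurl}
  license: {license}
  summary: {summary}
"""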
Example #4
def explicit(urls, prefix, verbose=True):
    import conda.fetch as fetch
    from conda.utils import md5_file

    dists = []
    for url in urls:
        if url == '@EXPLICIT':
            continue
        print("Fetching: %s" % url)
        channel_url, fn = url.rsplit('/', 1)
        dists.append(fn[:-8])
        index = fetch.fetch_index((channel_url + '/',))
        info = index[fn]
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    install.rm_rf(pkg_path)
                    fetch.fetch_pkg(info)
            except KeyError:
                sys.stderr.write('Warning: cannot look up MD5 of: %s\n' % fn)
        else:
            fetch.fetch_pkg(info)

    force_extract_and_link(dists, prefix, verbose=verbose)
Example #5
 def test_raises_final_exception_if_it_cant_remove(self):
     with self.generate_directory_mocks(on_win=True) as mocks:
         mocks['rmtree'].side_effect = OSError
         mocks['rename'].side_effect = OSError
         some_path = self.generate_random_path
         with self.assertRaises(OSError):
             install.rm_rf(some_path, trash=False)
Example #6
def get_pkginfo(package, filename, pypiurl, md5, python_version):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(SRC_CACHE):
        makedirs(SRC_CACHE)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(SRC_CACHE, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(SRC_CACHE, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(SRC_CACHE, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.load(fn)
    finally:
        rm_rf(tempdir)

    return pkginfo
Example #7
 def test_raises_final_exception_if_it_cant_remove(self):
     with self.generate_directory_mocks(on_win=True) as mocks:
         mocks['rmtree'].side_effect = OSError
         mocks['rename'].side_effect = OSError
         some_path = self.generate_random_path
         with self.assertRaises(OSError):
             install.rm_rf(some_path, trash=False)
Example #8
    def tearDown(self):
        rm_rf("environment.yml")
        if env_is_created(test_env_name_1):
            run_env_command(Commands.ENV_REMOVE, test_env_name_1)

        if env_is_created(test_env_name_42):
            run_env_command(Commands.ENV_REMOVE, test_env_name_42)
Example #9
def rm_index_cache():
    from os.path import join

    from conda.config import pkgs_dirs
    from conda.install import rm_rf

    rm_rf(join(pkgs_dirs[0], 'cache'))
Example #10
def build_package(package, version=None):
    if conda_package_exists(package):
        return 0
    if ' ' in package:
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package)
    except RuntimeError:
        directory, dependencies = make_recipe(package, version)

    try:
        print("package = %s" % package)
        print("   dependences = %s" % dependencies)
        # Dependencies will be either package_name or
        #  package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)
        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))
        result = subprocess.Popen(args).wait()
        if result == 0 and binstar_upload:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return result
Example #11
    def test_multi_channel_explicit(self):
        """
            When importing from a txt file,
            every package should come from the same channel.
        """
        with make_temp_env("python=3") as prefix:
            assert exists(join(prefix, PYTHON_BINARY))
            assert_package_is_installed(prefix, 'python-3')

            run_command(Commands.INSTALL, prefix, "six", "-c", "conda-forge")
            assert_package_is_installed(prefix, "six")

            output, error = run_command(Commands.LIST, prefix, "--explicit")
            self.assertIn("conda-forge", output)

            try:
                with tempfile.NamedTemporaryFile(mode="w", suffix="txt", delete=False) as env_txt:
                    env_txt.write(output)
                    env_txt.close()
                    prefix2 = make_temp_prefix()
                    run_command(Commands.CREATE, prefix2, "--file " + env_txt.name)

                    assert_package_is_installed(prefix2, "python")
                    assert_package_is_installed(prefix2, "six")
                output2, _ = run_command(Commands.LIST, prefix2, "--explicit")
                self.assertEqual(output, output2)
            finally:
                rm_rf(env_txt.name)
Example #12
def make_recipe(package, version, noarch_python=False):
    if version is None:
        release = client.package_releases(package)
        if len(release) > 0:
            version = release[0]
        else:
            raise RuntimeError("Empty releases for %s" % package)
    depends = get_all_dependencies(package, version)
    dirname = package.lower() + "-" + version
    if os.path.isdir(dirname):
        rm_rf(dirname)
    os.mkdir(dirname)
    direc = os.path.abspath(dirname)
    build = 'pip install %s==%s\n' % (package, version)
    # write build.sh file and bld.bat file
    filenames = ['build.sh', 'bld.bat']
    for name in filenames:
        with open(os.path.join(direc, name), 'w') as fid:
            fid.write(build)

    indent = '\n    - '
    d = {}
    d['packagename'] = package
    d['version'] = version
    if depends:
        d['depends'] = indent.join([''] + depends)
    else:
        d['depends'] = ''

    data = client.release_data(package, version)
    if not data:
        raise RuntimeError("Cannot get data for %s-%s" % (package, version))

    license_classifier = "License :: OSI Approved ::"
    if 'classifiers' in data:
        # Strip the classifier prefix (str.lstrip removes characters, not a prefix)
        licenses = [classifier[len(license_classifier):].strip() for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
    else:
        licenses = []

    if not licenses:
        license = data.get('license', 'UNKNOWN') or 'UNKNOWN'
    else:
        license = ' or '.join(licenses)

    d['homeurl'] = data['home_page']
    d['license'] = license
    d['summary'] = repr(data['summary'])

    if noarch_python:
        d['build_comment'] = ''
        d['noarch_python_comment'] = ''
    else:
        d['build_comment'] = '# '
        d['noarch_python_comment'] = '# '

    with open(os.path.join(direc, 'meta.yaml'), 'w') as fid:
        fid.write(meta_template.format(**d))

    return direc, depends
Example #13
 def test_retries_six_times_to_ensure_it_cant_really_remove(self):
     with self.generate_directory_mocks() as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = self.generate_random_path
         with self.assertRaises(OSError):
             install.rm_rf(some_path)
     self.assertEqual(6, mocks['rmtree'].call_count)
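These retry tests pin down the shape of rm_rf's fallback loop. A simplified, stand-alone sketch of that behaviour (not conda's actual implementation): shutil.rmtree is attempted max_retries + 1 times, with a growing pause between attempts, and the final OSError propagates.

import shutil
import time

def rm_rf_sketch(path, max_retries=5):
    # max_retries + 1 attempts in total; re-raise on the last failure.
    for attempt in range(max_retries + 1):
        try:
            shutil.rmtree(path)
            return
        except OSError:
            if attempt == max_retries:
                raise
            time.sleep(attempt)  # pause grows with each retry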
Example #14
def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
            verbose=True):
    if verbose:
        print('Cache location: %s' % pkgs_dir)
        for fn, exception in warnings:
            print(exception)

    if not rmlist:
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        print()
        maxlen = len(max(rmlist, key=lambda x: len(str(x))))
        fmt = "%-40s %10s"
        for pkg, pkgsize in zip(rmlist, pkgsizes):
            print(fmt % (pkg, human_bytes(pkgsize)))
        print('-' * (maxlen + 2 + 10))
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkg in rmlist:
        if verbose:
            print("removing %s" % pkg)
        rm_rf(join(pkgs_dir, pkg))
Example #15
def build_package(package, version=None, noarch_python=False):
    if ' ' in package:
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package,
                                      noarch_python=noarch_python)
    except RuntimeError:
        directory, dependencies = make_recipe(package, version,
                                              noarch_python=noarch_python)

    return_code = 0

    try:
        print("package = %s" % package)
        print("   dependencies = %s" % dependencies)
        # Dependencies will be either package_name or
        #  package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)
        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))

        try:
            utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            return_code = exc.return_code
        else:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return return_code
Example #16
 def tearDown(self):
     rm_rf("environment.yml")
     run_env_command(Commands.ENV_REMOVE, test_env_name_1)
     run_env_command(Commands.ENV_REMOVE, test_env_name_42)
     run_env_command(Commands.ENV_REMOVE, test_env_name_pip)
     for env_nb in range(1, 6):
         run_env_command(Commands.ENV_REMOVE, "envjson-{0}".format(env_nb))
Example #17
def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
            verbose=True):
    from conda.install import rm_rf

    if verbose:
        print('Cache location: %s' % pkgs_dir)
        for fn, exception in warnings:
            print(exception)

    if not rmlist:
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        print()
        maxlen = len(max(rmlist, key=lambda x: len(str(x))))
        fmt = "%-40s %10s"
        for pkg, pkgsize in zip(rmlist, pkgsizes):
            print(fmt % (pkg, human_bytes(pkgsize)))
        print('-' * (maxlen + 2 + 10))
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkg in rmlist:
        if verbose:
            print("removing %s" % pkg)
        rm_rf(join(pkgs_dir, pkg))
Example #18
def explicit(urls, prefix, verbose=True):
    import conda.fetch as fetch
    from conda.utils import md5_file

    dists = []
    for url in urls:
        if url == '@EXPLICIT':
            continue
        print("Fetching: %s" % url)
        m = url_pat.match(url)
        if m is None:
            sys.exit("Error: Could not parse: %s" % url)
        fn = m.group('fn')
        dists.append(fn[:-8])
        index = fetch.fetch_index((m.group('url') + '/',))
        try:
            info = index[fn]
        except KeyError:
            sys.exit("Error: no package '%s' in index" % fn)
        if m.group('md5') and m.group('md5') != info['md5']:
            sys.exit("Error: MD5 in explicit files does not match index")
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != info['md5']:
                    install.rm_rf(pkg_path)
                    fetch.fetch_pkg(info)
            except KeyError:
                sys.stderr.write('Warning: cannot look up MD5 of: %s\n' % fn)
        else:
            fetch.fetch_pkg(info)

    force_extract_and_link(dists, prefix, verbose=verbose)
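explicit() only reuses a cached package when its MD5 matches the index. A small sketch of that verification, recreated here with hashlib (the real md5_file helper lives in conda.utils):

import hashlib

def md5_file(path, chunk_size=1 << 16):
    # Stream the file so large packages never need to fit in memory.
    digest = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

def cached_copy_is_valid(pkg_path, expected_md5):
    return md5_file(pkg_path) == expected_md5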
Example #19
 def test_retries_six_times_to_ensure_it_cant_really_remove(self):
     with self.generate_directory_mocks() as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = self.generate_random_path
         with self.assertRaises(OSError):
             install.rm_rf(some_path)
     self.assertEqual(6, mocks['rmtree'].call_count)
Example #20
def rm_index_cache():
    from os.path import join

    from conda.config import pkgs_dirs
    from conda.install import rm_rf

    rm_rf(join(pkgs_dirs[0], 'cache'))
Example #21
 def test_calls_rmtree_and_rename_on_win(self):
     with self.generate_directory_mocks(on_win=True) as mocks:
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks['rename'].call_count == 1
     assert mocks['rmtree'].call_count == 1
     assert mocks['rename'].call_args[0][1] == mocks['rmtree'].call_args[0][0]
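The assertion that rename and rmtree see the same path reflects the Windows rename-then-delete idiom. A rough illustration of the idea (illustrative only, not the project's code): the directory is first renamed out of the way, which tends to succeed even when deletion would not, and the renamed copy is then removed.

import os
import shutil

def remove_dir_windows_style(path):
    trash_name = path + '.trash'
    os.rename(path, trash_name)   # renaming usually works even when files are locked
    shutil.rmtree(trash_name)     # delete the renamed copy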
Example #22
def get_outdated(output_dir, cran_metadata):
    to_update = []
    for recipe in listdir(output_dir):
        if not recipe.startswith('r-') or not isdir(recipe):
            continue

        recipe_name = recipe[2:]

        if recipe_name not in cran_metadata:
            print("Skipping %s, not found on CRAN" % recipe)
            continue

        up_to_date = version_compare(
            join(output_dir, recipe),
            cran_metadata[recipe_name]['Version'].replace('-', '_'))

        if up_to_date:
            print("%s is up-to-date." % recipe)
            continue

        print("Updating %s" % recipe)
        to_update.append(recipe_name)
        rm_rf(join(output_dir, recipe))

    return to_update
Example #23
    def test_multi_channel_explicit(self):
        """
            When importing from a txt file,
            every package should come from the same channel.
        """
        with make_temp_env("python=3") as prefix:
            assert exists(join(prefix, PYTHON_BINARY))
            assert_package_is_installed(prefix, 'python-3')

            run_command(Commands.INSTALL, prefix, "six", "-c", "conda-forge")
            assert_package_is_installed(prefix, "six")

            output, error = run_command(Commands.LIST, prefix, "--explicit")
            self.assertIn("conda-forge", output)

            try:
                with tempfile.NamedTemporaryFile(mode="w", suffix="txt", delete=False) as env_txt:
                    env_txt.write(output)
                    env_txt.close()
                    prefix2 = make_temp_prefix()
                    run_command(Commands.CREATE, prefix2, "--file " + env_txt.name)

                    assert_package_is_installed(prefix2, "python")
                    assert_package_is_installed(prefix2, "six")
                output2, _ = run_command(Commands.LIST, prefix2, "--explicit")
                self.assertEqual(output, output2)
            finally:
                rm_rf(env_txt.name)
Example #24
def build_package(package, version=None):
    if conda_package_exists(package):
        return 0
    if ' ' in package:
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package)
    except RuntimeError:
        directory, dependencies = make_recipe(package, version)

    try:
        print("package = %s" % package)
        print("   dependencies = %s" % dependencies)
        # Dependencies will be either package_name or
        #  package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)
        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))
        result = subprocess.Popen(args).wait()
        if result == 0 and binstar_upload:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return result
Example #25
def get_pkginfo(package, filename, pypiurl, md5, python_version):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    import yaml
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(SRC_CACHE):
        makedirs(SRC_CACHE)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(SRC_CACHE, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(SRC_CACHE, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(SRC_CACHE, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.load(fn)
    finally:
        rm_rf(tempdir)

    return pkginfo
Example #26
 def test_calls_rmtree_at_least_once_on_isdir_true(self):
     with self.generate_directory_mocks() as mocks:
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     mocks['rmtree'].assert_called_with(some_path,
                                        onerror=warn_failed_remove,
                                        ignore_errors=False)
Example #27
def run_setuppy(src_dir, temp_dir, args):
    '''
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    '''
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.
    create_env(build_prefix,
               ['python %s*' % args.python_version, 'pyyaml', 'setuptools'],
               clear_cache=False)
    stdlib_dir = join(
        build_prefix, 'Lib' if sys.platform == 'win32' else 'lib/python%s' %
        args.python_version)

    patch = join(temp_dir, 'pypi-distutils.patch')
    with open(patch, 'w') as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))

    if exists(join(stdlib_dir, 'distutils', 'core.py-copy')):
        rm_rf(join(stdlib_dir, 'distutils', 'core.py'))
        copy2(join(stdlib_dir, 'distutils', 'core.py-copy'),
              join(stdlib_dir, 'distutils', 'core.py'))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(
                join(stdlib_dir, 'distutils', '__pycache__',
                     'core.cpython-%s%s.pyc' % sys.version_info[:2]))
            rm_rf(
                join(stdlib_dir, 'distutils', '__pycache__',
                     'core.cpython-%s%s.pyo' % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyc'))
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyo'))
    else:
        copy2(join(stdlib_dir, 'distutils', 'core.py'),
              join(stdlib_dir, 'distutils', 'core.py-copy'))
    apply_patch(join(stdlib_dir, 'distutils'), patch)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if 'PYTHONPATH' in env:
        env[str('PYTHONPATH')] = str(src_dir + ':' + env['PYTHONPATH'])
    else:
        env[str('PYTHONPATH')] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    args = [build_python, 'setup.py', 'install']
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        print('$PYTHONPATH = %s' % env['PYTHONPATH'])
        sys.exit('Error: command failed: %s' % ' '.join(args))
    finally:
        chdir(cwd)
Example #28
def run_setuppy(src_dir, temp_dir, python_version, setup_options):
    '''
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    '''
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.
    create_env(config.build_prefix, ['python %s*' % python_version,
                                     'pyyaml', 'yaml',
                                     'setuptools', 'numpy'], clear_cache=False)
    stdlib_dir = join(config.build_prefix,
                      'Lib' if sys.platform == 'win32'
                      else 'lib/python%s' % python_version)

    patch = join(temp_dir, 'pypi-distutils.patch')
    with open(patch, 'w') as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))

    if exists(join(stdlib_dir, 'distutils', 'core.py-copy')):
        rm_rf(join(stdlib_dir, 'distutils', 'core.py'))
        copy2(join(stdlib_dir, 'distutils', 'core.py-copy'),
              join(stdlib_dir, 'distutils', 'core.py'))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                'core.cpython-%s%s.pyc' % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                'core.cpython-%s%s.pyo' % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyc'))
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyo'))
    else:
        copy2(join(stdlib_dir, 'distutils', 'core.py'), join(stdlib_dir,
            'distutils', 'core.py-copy'))
    apply_patch(join(stdlib_dir, 'distutils'), patch)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if 'PYTHONPATH' in env:
        env[str('PYTHONPATH')] = str(src_dir + ':' + env['PYTHONPATH'])
    else:
        env[str('PYTHONPATH')] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.build_python, 'setup.py', 'install']
    cmdargs.extend(setup_options)
    try:
        subprocess.check_call(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print('$PYTHONPATH = %s' % env['PYTHONPATH'])
        sys.exit('Error: command failed: %s' % ' '.join(cmdargs))
    finally:
        chdir(cwd)
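run_setuppy drives setup.py in a child process with the unpacked source prepended to PYTHONPATH. A compact sketch of just that environment handling (paths and arguments are placeholders):

import os
import subprocess
import sys

def run_child_with_src_on_path(src_dir, extra_args=()):
    env = os.environ.copy()
    # Prepend the source tree so imports resolve against it first.
    env['PYTHONPATH'] = src_dir + os.pathsep + env.get('PYTHONPATH', '')
    cmd = [sys.executable, 'setup.py', 'install'] + list(extra_args)
    return subprocess.check_call(cmd, cwd=src_dir, env=env)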
Example #29
def execute(args, parser):
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 '       try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        channel_urls = args.channel or ()

        common.ensure_override_channels_requires_channel(args)
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     '       add -n NAME or -p PREFIX option')

        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and
            common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
Example #30
 def test_dispatch_to_subprocess_on_error_on_windows(self):
     with self.generate_directory_mocks(on_win=True) as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = generate_random_path()
         install.rm_rf(some_path)
     check_call = mocks['check_call']
     expected_arg = ['cmd', '/c', 'rd', '/s', '/q', some_path]
     check_call.assert_called_with(expected_arg)
Example #31
 def test_retries_as_many_as_max_retries_plus_one(self):
     max_retries = random.randint(7, 10)
     with self.generate_directory_mocks() as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = self.generate_random_path
         with self.assertRaises(OSError):
             install.rm_rf(some_path, max_retries=max_retries)
     self.assertEqual(max_retries + 1, mocks['rmtree'].call_count)
Example #32
 def test_calls_rmtree_and_rename_on_win(self):
     with self.generate_directory_mocks(on_win=True) as mocks:
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks['rename'].call_count == 1
     assert mocks['rmtree'].call_count == 1
     assert mocks['rename'].call_args[0][1] == mocks['rmtree'].call_args[0][
         0]
Example #33
 def test_calls_rmtree_and_rename_on_unix(self):
     with self.generate_directory_mocks() as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks['rename'].call_count == 1
     assert mocks['rmtree'].call_count > 1
     assert dirname(mocks['rename'].call_args[0][1]) == mocks['rmtree'].call_args[0][0]
Example #34
 def test_dispatch_to_subprocess_on_error_on_windows(self):
     with self.generate_directory_mocks(on_win=True) as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = generate_random_path()
         install.rm_rf(some_path)
     check_call = mocks['check_call']
     expected_arg = ['cmd', '/c', 'rd', '/s', '/q', some_path]
     check_call.assert_called_with(expected_arg)
Example #35
 def test_retries_as_many_as_max_retries_plus_one(self):
     max_retries = random.randint(7, 10)
     with self.generate_directory_mocks() as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = self.generate_random_path
         with self.assertRaises(OSError):
             install.rm_rf(some_path, max_retries=max_retries)
     self.assertEqual(max_retries + 1, mocks['rmtree'].call_count)
Example #36
    def test_pauses_for_same_number_of_seconds_as_max_retries(self):
        with self.generate_directory_mocks() as mocks:
            mocks["rmtree"].side_effect = OSError
            max_retries = random.randint(1, 10)
            with self.assertRaises(OSError):
                install.rm_rf(self.generate_random_path, max_retries=max_retries, trash=False)

        expected = [mock.call(i) for i in range(max_retries)]
        mocks["sleep"].assert_has_calls(expected)
Example #37
def execute(args, parser):
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 '       try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        common.ensure_override_channels_requires_channel(args)
        channel_urls = args.channel or ()
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     '       add -n NAME or -p PREFIX option')

        actions = {plan.PREFIX: prefix, plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix)
                and common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs)

    if plan.nothing_to_do(actions):
        if args.all:
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)

    if args.all:
        rm_rf(prefix)
Example #38
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        plan.install_actions(prefix, index, specs, pinned=False, minimal_hint=minimal_hint)
    finally:
        ci.rm_rf(prefix)
Example #39
 def test_calls_rmtree_and_rename_on_unix(self):
     with self.generate_directory_mocks() as mocks:
         mocks['rmtree'].side_effect = OSError
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks['rename'].call_count == 1
     assert mocks['rmtree'].call_count > 1
     assert dirname(
         mocks['rename'].call_args[0][1]) == mocks['rmtree'].call_args[0][0]
Example #40
def check_install(packages, platform=None, channel_urls=(), prepend=True):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        plan.install_actions(prefix, index, specs)
    finally:
        ci.rm_rf(prefix)
Example #41
 def test_calls_rename_if_unlink_fails(self):
     with self.generate_mocks() as mocks:
         mocks["unlink"].side_effect = OSError(errno.ENOENT, "blah")
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks["unlink"].call_count > 1
     assert mocks["rename"].call_count == 1
     rename_args = mocks["rename"].call_args[0]
     assert rename_args[0] == mocks["unlink"].call_args_list[0][0][0]
     assert dirname(rename_args[1]) in (ca[0][0] for ca in mocks["unlink"].call_args_list)
Example #42
 def test_calls_rename_if_unlink_fails(self):
     with self.generate_mocks() as mocks:
         mocks['unlink'].side_effect = OSError
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks['unlink'].call_count > 1
     assert mocks['rename'].call_count == 1
     rename_args = mocks['rename'].call_args[0]
     assert rename_args[0] == mocks['unlink'].call_args_list[0][0][0]
     assert dirname(rename_args[1]) == mocks['unlink'].call_args_list[1][0][0]
Example #43
    def test_continues_calling_until_max_tries_on_called_process_errors_on_windows(self):
        max_retries = random.randint(6, 10)
        with self.generate_directory_mocks(on_win=True) as mocks:
            mocks['rmtree'].side_effect = OSError
            mocks['check_call'].side_effect = subprocess.CalledProcessError(1, "cmd")
            with self.assertRaises(OSError):
                install.rm_rf(generate_random_path(), max_retries=max_retries)

        self.assertEqual((max_retries * 2) + 1, mocks['rmtree'].call_count)
        self.assertEqual(max_retries, mocks['check_call'].call_count)
Example #44
    def test_pauses_for_same_number_of_seconds_as_max_retries(self):
        with self.generate_directory_mocks() as mocks:
            mocks['rmtree'].side_effect = OSError
            max_retries = random.randint(1, 10)
            with self.assertRaises(OSError):
                install.rm_rf(self.generate_random_path,
                              max_retries=max_retries)

        expected = [mock.call(i) for i in range(max_retries)]
        mocks['sleep'].assert_has_calls(expected)
Example #45
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False, minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
Example #46
    def test_calls_until_max_tries_on_called_process_errors_on_windows(self):
        max_retries = random.randint(6, 10)
        with self.generate_directory_mocks(on_win=True) as mocks:
            mocks['rmtree'].side_effect = OSError
            mocks['check_call'].side_effect = subprocess.CalledProcessError(
                1, "cmd")
            with self.assertRaises(OSError):
                install.rm_rf(generate_random_path(), max_retries=max_retries)

        self.assertEqual((max_retries * 2) + 1, mocks['rmtree'].call_count)
        self.assertEqual(max_retries, mocks['check_call'].call_count)
Example #47
 def test_calls_rename_if_unlink_fails(self):
     with self.generate_mocks() as mocks:
         mocks['unlink'].side_effect = OSError(errno.ENOENT, "blah")
         some_path = self.generate_random_path
         install.rm_rf(some_path)
     assert mocks['unlink'].call_count > 1
     assert mocks['rename'].call_count == 1
     rename_args = mocks['rename'].call_args[0]
     assert rename_args[0] == mocks['unlink'].call_args_list[0][0][0]
     assert dirname(
         rename_args[1]) in (ca[0][0]
                             for ca in mocks['unlink'].call_args_list)
Example #48
    def test_tries_extra_kwarg_on_windows(self):
        with self.generate_directory_mocks(on_win=True) as mocks:
            random_path = self.generate_random_path
            mocks['rmtree'].side_effect = [OSError, None]
            install.rm_rf(random_path)

        expected_call_list = [
            mock.call(random_path, ignore_errors=False, onerror=warn_failed_remove),
            mock.call(random_path, onerror=install._remove_readonly)
        ]
        mocks['rmtree'].assert_has_calls(expected_call_list)
        self.assertEqual(2, mocks['rmtree'].call_count)
Example #49
    def test_tries_extra_kwarg_on_windows(self):
        with self.generate_directory_mocks(on_win=True) as mocks:
            random_path = self.generate_random_path
            mocks['rmtree'].side_effect = [OSError, None]
            install.rm_rf(random_path)

        expected_call_list = [
            mock.call(random_path),
            mock.call(random_path, onerror=install._remove_readonly)
        ]
        mocks['rmtree'].assert_has_calls(expected_call_list)
        self.assertEqual(2, mocks['rmtree'].call_count)
Example #50
def render_recipe(recipe_path, no_download_source, verbose, dirty=False):
    if not isdir(config.croot):
        os.makedirs(config.croot)
    with Locked(config.croot):
        if not dirty:
            if sys.platform == 'win32':
                if isdir(source.WORK_DIR):
                    move_to_trash(source.WORK_DIR, '')
            else:
                rm_rf(source.WORK_DIR)

            assert not isdir(source.WORK_DIR), (
                "Failed to clean work directory.  Please close open"
                " programs/terminals/folders and try again.")

        arg = recipe_path
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                return
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)

        m, need_download, need_reparse_in_env = parse_or_try_download(
            m,
            no_download_source=no_download_source,
            verbose=verbose,
            dirty=dirty)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

    return m, need_download, need_reparse_in_env
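When render_recipe is handed a recipe archive rather than a directory, it unpacks the archive into a throwaway directory and cleans it up afterwards. That step in isolation, as a hedged sketch:

import tarfile
import tempfile

def extract_recipe_archive(archive_path):
    # Unpack a .tar/.tar.gz/.tgz/.tar.bz2 recipe into a fresh temporary
    # directory; the caller is responsible for removing it later.
    recipe_dir = tempfile.mkdtemp()
    with tarfile.open(archive_path, 'r:*') as t:
        t.extractall(path=recipe_dir)
    return recipe_dir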
Example #51
def run_setuppy(src_dir, temp_dir, python_version):
    """
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    """
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.
    create_env(config.build_prefix, ["python %s*" % python_version, "pyyaml", "setuptools", "numpy"], clear_cache=False)
    stdlib_dir = join(config.build_prefix, "Lib" if sys.platform == "win32" else "lib/python%s" % python_version)

    patch = join(temp_dir, "pypi-distutils.patch")
    with open(patch, "w") as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace("\\", "\\\\")))

    if exists(join(stdlib_dir, "distutils", "core.py-copy")):
        rm_rf(join(stdlib_dir, "distutils", "core.py"))
        copy2(join(stdlib_dir, "distutils", "core.py-copy"), join(stdlib_dir, "distutils", "core.py"))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, "distutils", "__pycache__", "core.cpython-%s%s.pyc" % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, "distutils", "__pycache__", "core.cpython-%s%s.pyo" % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, "distutils", "core.pyc"))
            rm_rf(join(stdlib_dir, "distutils", "core.pyo"))
    else:
        copy2(join(stdlib_dir, "distutils", "core.py"), join(stdlib_dir, "distutils", "core.py-copy"))
    apply_patch(join(stdlib_dir, "distutils"), patch)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if "PYTHONPATH" in env:
        env[str("PYTHONPATH")] = str(src_dir + ":" + env["PYTHONPATH"])
    else:
        env[str("PYTHONPATH")] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.build_python, "setup.py", "install"]
    try:
        subprocess.check_call(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print("$PYTHONPATH = %s" % env["PYTHONPATH"])
        sys.exit("Error: command failed: %s" % " ".join(cmdargs))
    finally:
        chdir(cwd)
Example #52
    def test_logs_messages_generated_for_each_retry(self):
        with self.generate_directory_mocks() as mocks:
            random_path = self.generate_random_path
            mocks["rmtree"].side_effect = OSError(random_path)
            max_retries = random.randint(1, 10)
            with self.assertRaises(OSError):
                install.rm_rf(random_path, max_retries=max_retries, trash=False)

        log_template = "\n".join(
            ["Unable to delete %s" % random_path, "%s" % OSError(random_path), "Retrying after %d seconds..."]
        )

        expected_call_list = [mock.call(log_template % i) for i in range(max_retries)]
        mocks["log"].debug.assert_has_calls(expected_call_list)
Example #53
def ensure_linked_actions(dists, prefix):
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue

        extracted_in = extracted_where(dist)
        if extracted_in:
            if config.always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                lt = (install.LINK_SOFT if
                      (config.allow_softlinks
                       and sys.platform != 'win32') else install.LINK_COPY)
            actions[inst.LINK].append('%s %s %d' % (dist, extracted_in, lt))
        else:
            # Make a guess from the first pkgs dir, which is where it will be
            # extracted
            try:
                os.makedirs(join(config.pkgs_dirs[0], dist, 'info'))
                index_json = join(config.pkgs_dirs[0], dist, 'info',
                                  'index.json')
                with open(index_json, 'w'):
                    pass
                if config.always_copy:
                    lt = install.LINK_COPY
                elif install.try_hard_link(config.pkgs_dirs[0], prefix, dist):
                    lt = install.LINK_HARD
                else:
                    lt = (install.LINK_SOFT if
                          (config.allow_softlinks
                           and sys.platform != 'win32') else install.LINK_COPY)
                actions[inst.LINK].append('%s %s %d' %
                                          (dist, config.pkgs_dirs[0], lt))
            except (OSError, IOError):
                actions[inst.LINK].append(dist)
            finally:
                try:
                    install.rm_rf(join(config.pkgs_dirs[0], dist))
                except (OSError, IOError):
                    pass

            actions[inst.EXTRACT].append(dist)
            if install.is_fetched(config.pkgs_dirs[0], dist):
                continue
            actions[inst.FETCH].append(dist)
    return actions
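The link-type decision above hinges on install.try_hard_link. A rough sketch of what such a probe can look like (illustrative, not conda's implementation): create one hard link from the extracted package into the target prefix and report whether the filesystem allowed it.

import os
from os.path import join

def can_hard_link(src_file, dst_dir):
    # Probe with a single os.link; remove the probe file if it was created.
    probe = join(dst_dir, '.hard-link-probe')
    try:
        os.link(src_file, probe)
    except OSError:
        return False
    else:
        os.unlink(probe)
        return True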
Example #54
File: plan.py Project: alexbw/conda
def ensure_linked_actions(dists, prefix):
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue

        extracted_in = extracted_where(dist)
        if extracted_in:
            if config.always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                lt = (install.LINK_SOFT if (config.allow_softlinks and
                                            sys.platform != 'win32') else
                      install.LINK_COPY)
            actions[inst.LINK].append('%s %s %d' % (dist, extracted_in, lt))
        else:
            # Make a guess from the first pkgs dir, which is where it will be
            # extracted
            try:
                os.makedirs(join(config.pkgs_dirs[0], dist, 'info'))
                index_json = join(config.pkgs_dirs[0], dist, 'info',
                                  'index.json')
                with open(index_json, 'w'):
                    pass
                if config.always_copy:
                    lt = install.LINK_COPY
                elif install.try_hard_link(config.pkgs_dirs[0], prefix, dist):
                    lt = install.LINK_HARD
                else:
                    lt = (install.LINK_SOFT if (config.allow_softlinks and
                                                sys.platform != 'win32') else
                          install.LINK_COPY)
                actions[inst.LINK].append('%s %s %d' % (dist, config.pkgs_dirs[0], lt))
            except (OSError, IOError):
                actions[inst.LINK].append(dist)
            finally:
                try:
                    install.rm_rf(join(config.pkgs_dirs[0], dist))
                except (OSError, IOError):
                    pass

            actions[inst.EXTRACT].append(dist)
            if install.is_fetched(config.pkgs_dirs[0], dist):
                continue
            actions[inst.FETCH].append(dist)
    return actions
Example #55
def render_recipe(recipe_path, no_download_source, verbose, dirty=False):
    if not isdir(config.croot):
        os.makedirs(config.croot)
    with Locked(config.croot):
        if not dirty:
            if sys.platform == 'win32':
                if isdir(source.WORK_DIR):
                    move_to_trash(source.WORK_DIR, '')
            else:
                rm_rf(source.WORK_DIR)

            assert not isdir(source.WORK_DIR), ("Failed to clean work directory.  Please close open"
                                        " programs/terminals/folders and try again.")

        arg = recipe_path
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                return
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)

        m, need_download, need_reparse_in_env = parse_or_try_download(m,
                                                            no_download_source=no_download_source,
                                                            verbose=verbose, dirty=dirty)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

    return m, need_download, need_reparse_in_env
Example #56
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    try:
        prefix = tempfile.mkdtemp('conda')
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform, prefix=prefix)
        linked = ci.linked(prefix)
        plan.add_defaults_to_specs(Resolve(index), linked, specs)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
Example #57
def execute(args, parser):
    name = args.remote_definition or args.name

    try:
        spec = specs.detect(name=name,
                            filename=args.file,
                            directory=os.getcwd(),
                            selectors=args.select)
        env = spec.environment

        # FIXME conda code currently requires args to have a name or prefix
        if args.prefix is None:
            args.name = env.name

    except exceptions.SpecNotFound as e:
        common.error_and_exit(str(e), json=args.json)

    prefix = common.get_prefix(args, search=False)

    if args.force and not is_root_prefix(prefix) and os.path.exists(prefix):
        rm_rf(prefix)
    cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    for installer_type, pkg_specs in env.dependencies.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, pkg_specs, args, env)
        except InvalidInstaller:
            sys.stderr.write(
                textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure your dependencies file has
                the correct spelling.  You might also try installing the
                conda-env-{0} package to see if it provides the required
                installer.
                """).lstrip().format(installer_type))
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
Example #58
    def test_logs_messages_generated_for_each_retry(self):
        with self.generate_directory_mocks() as mocks:
            random_path = self.generate_random_path
            mocks['rmtree'].side_effect = OSError(random_path)
            max_retries = random.randint(1, 10)
            with self.assertRaises(OSError):
                install.rm_rf(random_path, max_retries=max_retries)

        log_template = "\n".join([
            "Unable to delete %s" % random_path,
            "%s" % OSError(random_path),
            "Retrying after %d seconds...",
        ])

        expected_call_list = [mock.call(log_template % i)
                              for i in range(max_retries)]
        mocks['log'].debug.assert_has_calls(expected_call_list)