def uninstall_eggs(reqs):
    """ Remove eggs matching the given requirements. """
    # XXX This doesn't do dependencies?
    removable = []
    for req in pkg_resources.parse_requirements(reqs):
        name = req.project_name
        # All installed dists matching this project name; we only use the
        # first hit, as the original working set should hold one per name.
        matches = [d for d in pkg_resources.working_set
                   if d.project_name == name]
        if not matches:
            raise DistutilsOptionError('Cannot remove package not yet '
                                       'installed: %s' % name)
        egg = matches[0]
        if not egg.location.endswith('.egg'):
            raise DistutilsOptionError('Not an egg at %s, chickening out'
                                       % egg.location)
        if not is_local(egg.location):
            log.info("Not uninstalling egg, it's not in our virtualenv: %s"
                     % egg.location)
        else:
            removable.append(egg)

    # Physically delete each egg, then drop it from the global working set.
    for egg in removable:
        log.info("Removing %s (%s)" % (egg, egg.location))
        shutil.rmtree(egg.location)
        dependency.remove_from_ws(pkg_resources.working_set, egg)
def purge_req(req):
    """ Purge a requirement from all our indexes, used for backtracking.

    Drops ``req`` from the ``best`` candidate map (if present) and removes
    its chosen distribution from both the target working set and the
    setup-requires distributions.

    NOTE(review): ``best``, ``ws`` and ``setup_dists`` are free variables
    taken from the enclosing scope — confirm this stays a nested function.
    """
    if req.key in best:
        del best[req.key]
    # Idiom fix: the original used a throwaway list comprehension purely for
    # its side effects; a plain loop says what is meant.
    chosen = req._chosen_dist
    for w in (ws, setup_dists):
        if chosen in w:
            dependency.remove_from_ws(w, chosen)
def install(cmd, reqs, add_to_global=False, prefer_final=True,
            force_upgrade=False, use_existing=False):
    """ Installs a given requirement using buildout's modified easy_install.

    Parameters
    ----------
    cmd : `setuptools.Command`
        active setuptools Command class
    reqs : `list`
        list of distutils requirements, eg ['foo==1.2']
    add_to_global : `bool`
        adds installed distribution to the global working_set. This has the
        effect of making them available for import within this process, used
        by fetch_build_eggs.
    prefer_final : `bool`
        Will prefer released versions of the requirements over dev versions,
        unless the package is third-party where it always prefers final
        versions.
    force_upgrade : `bool`
        Will force downloads of requirements from PyPI. This is the rough
        equivalent of ``easy_install -U acme.foo``
    use_existing : `bool`
        Will not update any packages found in the current working set

    Returns
    -------
    ws : `pkg_resources.WorkingSet`
        Working Set for the distributions that were just installed.
    """
    # Remove anything we're upgrading
    if force_upgrade:
        uninstall_eggs(reqs)

    # Create installer class configured to install into wherever the command
    # class was setup for
    installer = Installer(dest=cmd.install_dir, index=cmd.index_url,
                          prefer_final=prefer_final)

    # Now apply our runtime additions to its working environment. This includes
    # adding eggs for the egg cache, and removing eggs that we're forcing to be
    # upgraded.
    installer.update_search_path()

    # This is a bit nasty - we have to monkey-patch the filter for final
    # versions so that we can also filter for dev versions as well.
    # Set prefer_final to True, always, so it enables the filter
    easy_install.prefer_final(True)

    # This returns a WorkingSet of the packages we just installed.
    ws = None
    if use_existing:
        # NOTE here we pass in the existing stuff - this will prefer installed
        # packages over ones on the server, eg an installed release
        # version won't get trumped by a dev version on the server
        ws = pkg_resources.WorkingSet(pkg_resources.working_set.entries)

        # We must remove the package we're installing from the 'baseline' ws.
        # This way we won't get any weird requirement conflicts with new
        # versions of the package we're trying to set up
        if cmd.distribution.metadata.name:
            dist = dependency.get_dist(cmd.distribution.metadata.name)
            if dist:
                dependency.remove_from_ws(ws, dist)

    # There's a chance that there were packages in setup_requires that were
    # also in install_requires. Because these are classed as 'already
    # installed' by the installer, they won't have been added to the workingset
    # of packages to set-up in the next step.
    # Here we ensure that they're added in along with any of their
    # own dependencies if they are also part of the package install_requires.
    # FIXME: this won't pick up non-direct dependencies.
    # Eg: setup_requires = numpy,
    #     install_requires = something that has numpy as a dependency
    def also_required(dist):
        # True when *dist* satisfies any of the requirements being installed.
        for req in pkg_resources.parse_requirements(reqs):
            if dist in req:
                return True
        return False

    setup_dists = [i for i in pkg_resources.working_set.resolve(
                       get_setup_requires(cmd.distribution))
                   if also_required(i)]
    if setup_dists:
        log.debug("setup_requires distributions to be set-up:")
        # Idiom fix: plain loop instead of a side-effect list comprehension.
        for i in setup_dists:
            log.debug(" %r" % i)

    # Now run the installer
    try:
        to_setup = installer.install(reqs, working_set=ws,
                                     use_existing=use_existing)
    # Fix: 'except X, e' is Python 2-only syntax; 'as' is valid on 2.6+ and
    # required on Python 3 (the file already uses 2.6+ features elsewhere).
    except easy_install.MissingDistribution as e:
        log.error(e)
        # TODO: call missing distro hook here
        sys.exit(1)
    # NOTE(review): to_setup is computed but neither processed nor returned
    # here — this version appears superseded by the later install(); confirm.
def install(cmd, reqs, add_to_global=False, prefer_final=True,
            force_upgrade=False, use_existing=False):
    """ Installs a given requirement using buildout's modified easy_install.

    Parameters
    ----------
    cmd : `setuptools.Command`
        active setuptools Command class
    reqs : `list`
        list of distutils requirements, eg ['foo==1.2']
    add_to_global : `bool`
        adds installed distribution to the global working_set. This has the
        effect of making them available for import within this process, used
        by fetch_build_eggs.
    prefer_final : `bool`
        Will prefer released versions of the requirements over dev versions
    force_upgrade : `bool`
        Will force downloads of requirements from PyPI. This is the
        equivalent of ``easy_install -U my_org.foo``
    use_existing : `bool`
        Will not update any packages found in the current working set

    Returns
    -------
    ws : `pkg_resources.WorkingSet`
        Working Set for the distributions that were just installed.
    """
    # Remove anything we're upgrading
    if force_upgrade:
        uninstall_eggs(reqs)

    # Create installer class configured to install into wherever the command
    # class was setup for
    installer = Installer(dest=cmd.install_dir, index=cmd.index_url)

    # Now apply our runtime additions to its working environment. This includes
    # adding eggs for the egg cache, and removing eggs that we're forcing to be
    # upgraded.
    installer.update_search_path()

    # This is a bit nasty - we have to monkey-patch the filter for final
    # versions so that we can also filter for dev versions as well.
    # Set prefer_final to True, always, so it enables the filter
    easy_install.prefer_final(True)

    # This returns a WorkingSet of the packages we just installed.
    ws = None
    if use_existing:
        # NOTE here we pass in the existing stuff - this will prefer installed
        # packages over ones on the server, eg an installed release version
        # won't get trumped by a dev version on the server
        ws = pkg_resources.WorkingSet(pkg_resources.working_set.entries)

        # We must remove the package we're installing from the 'baseline' ws.
        # This way we won't get any weird requirement conflicts with new
        # versions of the package we're trying to set up
        if cmd.distribution.metadata.name:
            dist = dependency.get_dist(cmd.distribution.metadata.name)
            if dist:
                dependency.remove_from_ws(ws, dist)

    # There's a chance that there were packages in setup_requires that were
    # also in install_requires. Because these are classed as 'already
    # installed' by the installer, they won't have been added to the workingset
    # of packages to set-up in the next step.
    # Here we ensure that they're added in along with any of their
    # own dependencies if they are also part of the package install_requires.
    # FIXME: this won't pick up non-direct dependencies.
    # Eg: setup_requires = numpy,
    #     install_requires = something that has numpy as dependency
    def also_required(dist):
        # True when *dist* satisfies any of the requirements being installed.
        for req in pkg_resources.parse_requirements(reqs):
            if dist in req:
                return True
        return False

    setup_dists = [i for i in pkg_resources.working_set.resolve(
                       get_setup_requires(cmd.distribution))
                   if also_required(i)]
    if setup_dists:
        log.debug("setup_requires distributions to be set-up:")
        # Idiom fix: plain loop instead of a side-effect list comprehension.
        for i in setup_dists:
            log.debug(" %r" % i)

    # Now patch the filter method if required and run the installer
    if prefer_final:
        to_setup = installer.install(reqs, working_set=ws,
                                     use_existing=use_existing)
    else:
        with prefer_dev():
            to_setup = installer.install(reqs, working_set=ws,
                                         use_existing=use_existing)

    # Add any of the setup_requires dists to be set-up.
    to_setup = set(to_setup + setup_dists)
    if to_setup:
        log.debug('Packages to set-up:')
        for i in to_setup:
            log.debug(' %r' % i)

        # Now we selectively run setuptool's post-install steps.
        # Luckily, the buildout installer didn't strip off any of the useful
        # metadata about the console scripts.
        for dist in to_setup:
            if dist.location.startswith(manage.get_site_packages()):
                fix_permissions(dist)
            cmd.process_distribution(None, dist, deps=False)
            # Add the distributions to the global registry if we asked for
            # it. This makes the distro importable, and classed as 'already
            # installed' by the dependency resolution algorithm.
            if add_to_global:
                pkg_resources.working_set.add(dist)
    else:
        log.debug('Nothing to set-up.')
    return to_setup