def fetchers(cls, config, target=None):
  fetchers = []
  fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', []))
  fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', []))
  if target and isinstance(target, PythonBinary):
    fetchers.extend(Fetcher([url]) for url in target.repositories)
    fetchers.extend(PyPIFetcher(url) for url in target.indices)
  return fetchers
def build_obtainer(options):
  interpreter = interpreter_from_options(options)
  platform = options.platform
  # Plain repos first, then PyPI and any additional PyPI-style indices.
  fetchers = [Fetcher(options.repos)]
  if options.pypi:
    fetchers.append(PyPIFetcher())
  if options.indices:
    fetchers.extend(PyPIFetcher(index) for index in options.indices)
  translator = translator_from_options(options)
  # Wheels take precedence over eggs and sdists when wheel support is enabled.
  if options.use_wheel:
    package_precedence = (WheelPackage, EggPackage, SourcePackage)
  else:
    package_precedence = (EggPackage, SourcePackage)
  obtainer = CachingObtainer(install_cache=options.cache_dir,
                             fetchers=fetchers,
                             translators=translator,
                             precedence=package_precedence)
  return obtainer
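# A minimal driving sketch (not from the original source): build an argparse-style
# namespace carrying the attributes build_obtainer reads above.  All names, URLs, and
# values here are illustrative assumptions; interpreter_from_options and
# translator_from_options may consult additional options not shown.
from argparse import Namespace

example_options = Namespace(
    python=None,                                 # let the interpreter helper pick a default
    platform='current',
    repos=['https://example.com/python/eggs/'],  # hypothetical extra repo URL
    pypi=True,                                   # also consult PyPI
    indices=[],                                  # no additional PyPI-style indices
    use_wheel=True,                              # prefer wheels over eggs/sdists
    cache_dir='/tmp/pex-install-cache',          # hypothetical install cache
)
example_obtainer = build_obtainer(example_options)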
def from_target(cls, config, target, conn_timeout=None):
  from twitter.common.python.fetcher import PyPIFetcher, Fetcher
  from twitter.common.python.resolver import Resolver
  from twitter.common.python.http import Crawler
  from twitter.common.quantity import Amount, Time

  conn_timeout_amount = Amount(conn_timeout, Time.SECONDS) if conn_timeout is not None else None
  crawler = Crawler(cache=config.get('python-setup', 'download_cache'),
                    conn_timeout=conn_timeout_amount)
  fetchers = []
  fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', []))
  fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', []))
  platforms = config.getlist('python-setup', 'platforms', ['current'])
  if isinstance(target, PythonBinary) and target.platforms:
    platforms = target.platforms
  return cls(platforms=get_platforms(platforms),
             resolver=Resolver(cache=config.get('python-setup', 'install_cache'),
                               crawler=crawler,
                               fetchers=fetchers,
                               install_cache=config.get('python-setup', 'install_cache'),
                               conn_timeout=conn_timeout_amount))
def installer(req):
  # Attempt to obtain the egg from the local cache.  If it's an exact match, we can use it.
  # If it's not an exact match, then if it's been resolved sufficiently recently, we still
  # use it.
  dist = egg_obtainer.obtain(req)
  if dist and (requirement_is_exact(req) or now - os.path.getmtime(dist.location) < ttl):
    return dist

  # Failed, so follow through to "remote" resolution
  source_translator = SourceTranslator(
      interpreter=interpreter,
      use_2to3=getattr(req, 'use_2to3', False),
      **shared_options)
  translator = ChainedTranslator(egg_translator, source_translator)
  obtainer = Obtainer(
      crawler,
      [Fetcher([req.repository])] if getattr(req, 'repository', None) else fetchers,
      translator)
  dist = obtainer.obtain(req)
  if dist:
    try:
      touch(dist.location)
    except OSError:
      pass
  return dist
def fetchers_from_config(config):
  fetchers = []
  fetchers.extend(Fetcher([url]) for url in config.getlist('python-repos', 'repos', []))
  fetchers.extend(PyPIFetcher(url) for url in config.getlist('python-repos', 'indices', []))
  return fetchers
def fetcher(config):
  return Fetcher(repositories=config.getlist('python-setup', 'repos', []),
                 indices=config.getlist('python-setup', 'indices', []),
                 external=config.getbool('python-setup', 'allow_pypi', True),
                 download_cache=config.get('python-setup', 'cache', default=None))
def build_pex(args, options):
  interpreter = None
  if options.python:
    # Prefer an explicit interpreter path; otherwise look one up from the environment.
    if os.path.exists(options.python):
      interpreter = PythonInterpreter.from_binary(options.python)
    else:
      interpreter = PythonInterpreter.from_env(options.python)
    if interpreter is None:
      die('Failed to find interpreter: %s' % options.python)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

  # Propagate CLI flags into the PEX metadata.
  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  fetchers = [Fetcher(options.repos)]
  if options.pypi:
    fetchers.append(PyPIFetcher())

  resolveds = requirement_resolver(options.requirements,
                                   cache=options.cache_dir,
                                   fetchers=fetchers,
                                   interpreter=interpreter,
                                   platform=options.platform)

  if resolveds:
    log('Resolved distributions:', v=options.verbosity)

  for pkg in resolveds:
    log(' %s' % pkg, v=options.verbosity)
    pex_builder.add_distribution(pkg)
    pex_builder.add_requirement(pkg.as_requirement())

  # Build local source directories into eggs and embed them.
  for source_dir in options.source_dirs:
    try:
      egg_path = EggInstaller(source_dir).bdist()
    except EggInstaller.Error:
      die('Failed to run installer for %s' % source_dir, CANNOT_DISTILL)
    pex_builder.add_egg(egg_path)

  if options.entry_point is not None:
    log('Setting entry point to %s' % options.entry_point, v=options.verbosity)
    pex_builder.info.entry_point = options.entry_point
  else:
    log('Creating environment PEX.', v=options.verbosity)

  return pex_builder
def iter(self, requirement):
  # Requirements that pin a specific repository get a dedicated Obtainer scoped to that
  # repository; everything else falls through to the default fetchers via the superclass.
  if hasattr(requirement, 'repository') and requirement.repository:
    obtainer = Obtainer(crawler=self._crawler,
                        fetchers=[Fetcher([requirement.repository])],
                        translators=self._translator)
    for package in obtainer.iter(requirement):
      yield package
  else:
    for package in super(PantsObtainer, self).iter(requirement):
      yield package
def fetcher_provider():
  from twitter.common.python.fetcher import Fetcher
  cls._log('Initializing fetcher:')
  cls._log(' repositories: %s' % ' '.join(pex_info.repositories))
  cls._log(' indices: %s' % ' '.join(pex_info.indices))
  cls._log(' with pypi: %s' % pex_info.allow_pypi)
  return Fetcher(
      repositories=pex_info.repositories,
      indices=pex_info.indices,
      external=pex_info.allow_pypi,
      download_cache=pex_info.download_cache)
def make_installer(self, reqs, interpreter, platform):
  assert len(reqs) == 1 and isinstance(reqs[0], PythonRequirement), (
      'Got requirement list: %s' % repr(reqs))
  req = reqs[0]
  fetchers = ([Fetcher([req.repository])] + self._fetchers) if req.repository else self._fetchers
  translator = ChainedTranslator(
      EggTranslator(install_cache=self._install_cache,
                    platform=platform,
                    python=interpreter.python),
      SourceTranslator(install_cache=self._install_cache,
                       interpreter=interpreter,
                       platform=platform,
                       use_2to3=req.use_2to3))
  obtainer = Obtainer(self._crawler, fetchers, translator)
  return obtainer.obtain
def main(cls):
  from itertools import chain
  from twitter.common.python.distiller import Distiller
  from twitter.common.python.fetcher import Fetcher, PyPIFetcher
  from twitter.common.python.http import Crawler
  from twitter.common.python.installer import Installer
  from twitter.common.python.obtainer import Obtainer
  from twitter.common.python.resolver import Resolver
  from twitter.common.python.translator import Translator

  parser = cls.configure_clp()
  options, args = parser.parse_args()
  cls.process_logging_options(options)
  cls.exit_on_erroneous_inputs(options, parser)

  pex_builder = PEXBuilder()

  fetchers = [Fetcher(options.repos)]
  if options.use_pypi:
    fetchers.append(PyPIFetcher())

  resolver = Resolver(cache=options.cache_dir, fetchers=fetchers, install_cache=options.cache_dir)
  reqs = cls.get_all_valid_reqs(options.requirements, options.requirements_txt)
  cls.logger.info("Requirements specified: " + str(reqs))
  resolveds = resolver.resolve(reqs)
  cls.logger.info("Resolved requirements: " + str(resolveds))

  for pkg in resolveds:
    cls.logger.info("Adding to PEX: Distribution: {0}".format(pkg))
    pex_builder.add_distribution(pkg)
    pex_builder.add_requirement(pkg.as_requirement())

  for source_dir in options.source_dirs:
    dist = Installer(source_dir).distribution()
    egg_path = Distiller(dist).distill()
    cls.logger.info("Adding source dir to PEX: {0} distilled into egg {1}".format(
        source_dir, egg_path))
    pex_builder.add_egg(egg_path)

  if options.entry_point is not None:
    if options.entry_point.endswith(".py"):
      cls.logger.info("Adding entry point to PEX: File: {0}".format(options.entry_point))
      pex_builder.set_executable(options.entry_point)
    elif ":" in options.entry_point:
      cls.logger.info("Adding entry point to PEX: Function: {0}".format(options.entry_point))
      pex_builder.info().entry_point = options.entry_point
    else:
      cls.logger.warn("Invalid entry point: {0}".format(options.entry_point))

  if options.pex_name is not None:
    cls.logger.info("Saving PEX file at {0}.pex".format(options.pex_name))
    pex_builder.build(options.pex_name + '.pex')
  else:
    pex_builder.freeze()
    cls.logger.info("Running PEX file at {0} with args {1}".format(pex_builder.path(), args))
    from .pex import PEX
    pex = PEX(pex_builder.path())
    return pex.run(args=list(args))

  logging.shutdown()
def resolve_multi(config, requirements, interpreter=None, platforms=None,
                  conn_timeout=None, ttl=3600):
  """Multi-platform dependency resolution for PEX files.

  Given a pants configuration and a set of requirements, return the distributions that must
  be included in order to satisfy them.  That may involve distributions for multiple platforms.

  :param config: Pants :class:`Config` object.
  :param requirements: A list of :class:`PythonRequirement` objects to resolve.
  :param interpreter: :class:`PythonInterpreter` for which requirements should be resolved.
                      If unspecified, defaults to the current interpreter.
  :param platforms: Optional list of platforms against which requirements will be resolved.
                    If unspecified, the defaults from `config` are used.
  :param conn_timeout: Optional connection timeout for any remote fetching.
  :param ttl: Time in seconds before we consider re-resolving an open-ended requirement, e.g.
              "flask>=0.2", if a matching distribution is available on disk.  Defaults to 3600.
  :returns: A dict mapping each platform string to the distributions resolved for it.
  """
  now = time.time()
  distributions = {}
  interpreter = interpreter or PythonInterpreter.get()
  if not isinstance(interpreter, PythonInterpreter):
    raise TypeError('Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))

  install_cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
  platforms = get_platforms(platforms or config.getlist('python-setup', 'platforms', ['current']))

  crawler = crawler_from_config(config, conn_timeout=conn_timeout)
  fetchers = fetchers_from_config(config)

  # Resolve separately for each target platform.
  for platform in platforms:
    env = PantsEnvironment(search_path=[], platform=platform, python=interpreter.python)
    working_set = WorkingSet(entries=[])

    shared_options = dict(install_cache=install_cache, platform=platform)
    egg_translator = EggTranslator(python=interpreter.python, **shared_options)
    egg_obtainer = Obtainer(crawler, [Fetcher([install_cache])], egg_translator)

    def installer(req):
      # Attempt to obtain the egg from the local cache.  If it's an exact match, we can use it.
      # If it's not an exact match, then if it's been resolved sufficiently recently, we still
      # use it.
      dist = egg_obtainer.obtain(req)
      if dist and (requirement_is_exact(req) or now - os.path.getmtime(dist.location) < ttl):
        return dist

      # Failed, so follow through to "remote" resolution
      source_translator = SourceTranslator(
          interpreter=interpreter,
          use_2to3=getattr(req, 'use_2to3', False),
          **shared_options)
      translator = ChainedTranslator(egg_translator, source_translator)
      obtainer = Obtainer(
          crawler,
          [Fetcher([req.repository])] if getattr(req, 'repository', None) else fetchers,
          translator)
      dist = obtainer.obtain(req)
      if dist:
        try:
          touch(dist.location)
        except OSError:
          pass
      return dist

    distributions[platform] = working_set.resolve(requirements, env=env, installer=installer)

  return distributions
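# A minimal usage sketch (not from the original source), assuming a pants Config object
# and a PythonRequirement are already available; the names and requirement string below
# are hypothetical.  resolve_multi returns a dict keyed by platform string.
example_config = Config.load()                       # hypothetical: however pants loads config
example_reqs = [PythonRequirement('flask>=0.2')]     # hypothetical open-ended requirement
for example_platform, dists in resolve_multi(example_config, example_reqs, ttl=3600).items():
  for dist in dists:
    print('%s -> %s' % (example_platform, dist))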