Example 1
0
    def from_target(cls, config, target, conn_timeout=None):
        """Build an instance for *target* from pants *config* settings.

        Assembles a Crawler, repo/index Fetchers and a Resolver out of the
        'python-setup' / 'python-repos' config sections, then constructs the
        class with the resolved platform list.

        :param config: pants config object with get()/getlist() accessors.
        :param target: build target; a PythonBinary may override platforms.
        :param conn_timeout: optional connection timeout in seconds.
        """
        from twitter.common.python.fetcher import PyPIFetcher, Fetcher
        from twitter.common.python.resolver import Resolver
        from twitter.common.python.http import Crawler
        from twitter.common.quantity import Amount, Time

        # Wrap the raw seconds value in an Amount only when one was supplied.
        if conn_timeout is None:
            timeout = None
        else:
            timeout = Amount(conn_timeout, Time.SECONDS)

        # Plain repo URLs become Fetchers; index URLs become PyPIFetchers.
        repo_fetchers = [Fetcher([repo_url])
                         for repo_url in config.getlist('python-repos', 'repos', [])]
        index_fetchers = [PyPIFetcher(index_url)
                          for index_url in config.getlist('python-repos', 'indices', [])]

        # A PythonBinary's own platform list wins over the configured default.
        target_platforms = config.getlist('python-setup', 'platforms', ['current'])
        if isinstance(target, PythonBinary) and target.platforms:
            target_platforms = target.platforms

        install_cache = config.get('python-setup', 'install_cache')
        crawler = Crawler(cache=config.get('python-setup', 'download_cache'),
                          conn_timeout=timeout)
        resolver = Resolver(cache=install_cache,
                            crawler=crawler,
                            fetchers=repo_fetchers + index_fetchers,
                            install_cache=install_cache,
                            conn_timeout=timeout)
        return cls(platforms=get_platforms(target_platforms), resolver=resolver)
Example 2
0
 def crawler(cls, config, conn_timeout=None):
     """Return a Crawler caching into the configured download directory.

     :param config: pants config object providing get().
     :param conn_timeout: optional connection timeout passed through as-is.
     """
     download_cache = config.get('python-setup', 'download_cache')
     return Crawler(conn_timeout=conn_timeout, cache=download_cache)
Example 3
0
def crawler_from_config(config, conn_timeout=None):
  """Construct a Crawler whose cache lives under the PythonSetup scratch dir.

  :param config: pants config object handed to PythonSetup.
  :param conn_timeout: optional connection timeout passed through as-is.
  """
  cache_dir = PythonSetup(config).scratch_dir('download_cache', default_name='downloads')
  return Crawler(conn_timeout=conn_timeout, cache=cache_dir)