Example #1
    def setUp(self):
        super(FetcherTest, self).setUp()

        self.requests = self.mox.CreateMockAnything()
        self.response = self.mox.CreateMock(requests.Response)
        self.fetcher = Fetcher(requests_api=self.requests)
        self.listener = self.mox.CreateMock(Fetcher.Listener)
Example #2
    def _bootstrap_ivy(self, bootstrap_jar_path):
        options = self._ivy_subsystem.get_options()
        if not os.path.exists(bootstrap_jar_path):
            with temporary_file() as bootstrap_jar:
                fetcher = Fetcher(get_buildroot())
                checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
                try:
                    logger.info('\nDownloading {}'.format(
                        options.bootstrap_jar_url))
                    # TODO: Capture the stdout of the fetcher, instead of letting it output
                    # to the console directly.
                    fetcher.download(
                        options.bootstrap_jar_url,
                        listener=fetcher.ProgressListener().wrap(checksummer),
                        path_or_fd=bootstrap_jar,
                        timeout_secs=options.bootstrap_fetch_timeout_secs)
                    logger.info('sha1: {}'.format(checksummer.checksum))
                    bootstrap_jar.close()
                    touch(bootstrap_jar_path)
                    shutil.move(bootstrap_jar.name, bootstrap_jar_path)
                except fetcher.Error as e:
                    raise self.Error(
                        'Problem fetching the ivy bootstrap jar! {}'.format(e))

        return Ivy(bootstrap_jar_path,
                   ivy_settings=options.bootstrap_ivy_settings or options.ivy_settings,
                   ivy_resolution_cache_dir=self._ivy_subsystem.resolution_cache_dir(),
                   extra_jvm_options=self._ivy_subsystem.extra_jvm_options())
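The bootstrap examples on this page all follow the same download-and-checksum pattern. For reference, here is a minimal, self-contained sketch of that pattern in isolation; the URL, destination path, timeout value, and module import paths are assumptions for illustration, not taken from the examples above.

import hashlib

from pants.base.build_environment import get_buildroot  # import path assumed
from pants.net.http.fetcher import Fetcher              # import path assumed

url = 'https://example.com/tool.jar'  # placeholder URL
dest = '/tmp/tool.jar'                # placeholder destination

fetcher = Fetcher(get_buildroot())
# Chain a progress listener with a checksum listener, as the examples above do.
checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
try:
    fetcher.download(url,
                     listener=fetcher.ProgressListener().wrap(checksummer),
                     path_or_fd=dest,
                     timeout_secs=60)
    print('sha1: {}'.format(checksummer.checksum))
except fetcher.Error as e:
    print('Download failed: {}'.format(e))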
Example #3
  def bootstrap_coursier(self, workunit_factory):

    opts = self.get_options()
    bootstrap_url = opts.bootstrap_jar_url

    coursier_bootstrap_dir = os.path.join(opts.pants_bootstrapdir,
                                          'tools', 'jvm', 'coursier',
                                          opts.version)

    bootstrap_jar_path = os.path.join(coursier_bootstrap_dir, 'coursier.jar')

    if not os.path.exists(bootstrap_jar_path):
      with workunit_factory(name='bootstrap-coursier', labels=[WorkUnitLabel.TOOL]) as workunit:
        with safe_concurrent_creation(bootstrap_jar_path) as temp_path:
          fetcher = Fetcher(get_buildroot())
          checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
          try:
            logger.info('\nDownloading {}'.format(bootstrap_url))
            # TODO: Capture the stdout of the fetcher, instead of letting it output
            # to the console directly.
            fetcher.download(bootstrap_url,
                             listener=fetcher.ProgressListener().wrap(checksummer),
                             path_or_fd=temp_path,
                             timeout_secs=opts.bootstrap_fetch_timeout_secs)
            logger.info('sha1: {}'.format(checksummer.checksum))
          except fetcher.Error as e:
            workunit.set_outcome(WorkUnit.FAILURE)
            raise self.Error('Problem fetching the coursier bootstrap jar! {}'.format(e))
          else:
            workunit.set_outcome(WorkUnit.SUCCESS)

    return bootstrap_jar_path
Example #4
    def _bootstrap_ivy(self, bootstrap_jar_path):
        if not os.path.exists(bootstrap_jar_path):
            with temporary_file() as bootstrap_jar:
                fetcher = Fetcher()
                checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
                try:
                    log.info('\nDownloading %s' % self._bootstrap_jar_url)
                    # TODO: Capture the stdout of the fetcher, instead of letting it output
                    # to the console directly.
                    fetcher.download(
                        self._bootstrap_jar_url,
                        listener=fetcher.ProgressListener().wrap(checksummer),
                        path_or_fd=bootstrap_jar,
                        timeout_secs=self._timeout_secs)
                    log.info('sha1: %s' % checksummer.checksum)
                    bootstrap_jar.close()
                    touch(bootstrap_jar_path)
                    shutil.move(bootstrap_jar.name, bootstrap_jar_path)
                except fetcher.Error as e:
                    raise self.Error(
                        'Problem fetching the ivy bootstrap jar! %s' % e)

        return Ivy(bootstrap_jar_path,
                   ivy_settings=self._ivy_settings,
                   ivy_cache_dir=self.ivy_cache_dir)
Example #5
  def _select_binary_stream(self, name, binary_path, fetcher=None):
    """Select a binary matching the current os and architecture.

    :param string binary_path: The path to the binary to fetch.
    :param fetcher: Optional argument used only for testing, to 'pretend' to open urls.
    :returns: a 'stream' to download it from a support directory. The returned 'stream' is actually
      a lambda function which returns the files binary contents.
    :raises: :class:`pants.binary_util.BinaryUtil.BinaryNotFound` if no binary of the given version
      and name could be found for the current platform.
    """

    if not self._baseurls:
      raise self.NoBaseUrlsError(
          'No urls are defined for the --pants-support-baseurls option.')
    downloaded_successfully = False
    accumulated_errors = []
    for baseurl in OrderedSet(self._baseurls):  # Wrap in OrderedSet because duplicates are wasteful.
      url = posixpath.join(baseurl, binary_path)
      logger.info('Attempting to fetch {name} binary from: {url} ...'.format(name=name, url=url))
      try:
        with temporary_file() as dest:
          fetcher = fetcher or Fetcher()
          fetcher.download(url, listener=Fetcher.ProgressListener(), path_or_fd=dest)
          logger.info('Fetched {name} binary from: {url} .'.format(name=name, url=url))
          downloaded_successfully = True
          dest.seek(0)
          yield lambda: dest.read()
          break
      except (IOError, Fetcher.Error, ValueError) as e:
        accumulated_errors.append('Failed to fetch binary from {url}: {error}'
                                  .format(url=url, error=e))
    if not downloaded_successfully:
      raise self.BinaryNotFound(binary_path, accumulated_errors)
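Note that the method above is a generator: it yields at most one 'stream' (a lambda that returns the binary's bytes) and raises BinaryNotFound otherwise. A hypothetical caller might consume it as sketched below; the binary_util receiver, binary name, and path are placeholders, not names from the source.

# Hypothetical caller; 'sometool' and the path are placeholders.
for stream in binary_util._select_binary_stream('sometool', 'bin/sometool/linux/x86_64/1.0/sometool'):
    contents = stream()  # The yielded 'stream' is a lambda returning the binary's contents.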
Example #6
    def assert_local_file_fetch_relative(self, url, *rel_path):
        expected_contents = b'proof'
        with temporary_dir() as root_dir:
            with safe_open(os.path.join(root_dir, *rel_path), 'wb') as fp:
                fp.write(expected_contents)
            with temporary_file() as download_fp:
                Fetcher(root_dir).download(url, path_or_fd=download_fp)
                download_fp.close()
                with open(download_fp.name, 'rb') as fp:
                    self.assertEqual(expected_contents, fp.read())
Example #7
    def resolve_target(self, node_task, target, results_dir, node_paths):
        self._copy_sources(target, results_dir)

        with temporary_dir() as temp_dir:
            archive_file_name = urllib_parse.urlsplit(
                target.dependencies_archive_url).path.split('/')[-1]
            if not archive_file_name:
                raise TaskError(
                    'Could not determine archive file name for {target} from {url}'
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url))

            download_path = os.path.join(temp_dir, archive_file_name)

            logger.info(
                'Downloading archive {archive_file_name} from '
                '{dependencies_archive_url} to {path}'.format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path))

            try:
                Fetcher(get_buildroot()).download(
                    target.dependencies_archive_url,
                    listener=Fetcher.ProgressListener(),
                    path_or_fd=download_path,
                    timeout_secs=self.get_options().fetch_timeout_secs)
            except Fetcher.Error as error:
                raise TaskError(
                    'Failed to fetch preinstalled node_modules for {target} from {url}: {error}'
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url,
                            error=error))

            logger.info(
                'Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}'
                .format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path))

            archiver_for_path(archive_file_name).extract(
                download_path, temp_dir)

            extracted_node_modules = os.path.join(temp_dir, 'node_modules')
            if not os.path.isdir(extracted_node_modules):
                raise TaskError(
                    'Did not find an extracted node_modules directory for {target} '
                    'inside {dependencies_archive_url}'.format(
                        target=target.address.reference(),
                        dependencies_archive_url=target.dependencies_archive_url))

            shutil.move(extracted_node_modules,
                        os.path.join(results_dir, 'node_modules'))
Example #8
    def test_execute_java_no_error_weird_path(self):
        """
        :API: public
        """
        with temporary_file(suffix=".jar") as temp_path:
            fetcher = Fetcher(get_buildroot())
            try:
                # Download a jar that echoes things.
                fetcher.download(
                    "https://maven-central.storage-download.googleapis.com/repos/central/data/io/get-coursier/echo/1.0.0/echo-1.0.0.jar",
                    path_or_fd=temp_path.name,
                    timeout_secs=2,
                )
            except fetcher.Error:
                self.fail("Failed to download the echo jar")

            task = self.execute(self.context([]))
            executor = task.create_java_executor()

            # Executing the jar as is should work.
            self.assertEqual(
                0,
                util.execute_java(
                    executor=executor,
                    classpath=[temp_path.name],
                    main="coursier.echo.Echo",
                    args=["Hello World"],
                    create_synthetic_jar=True,
                ),
            )

            # Rename the jar to contain reserved characters.
            new_path = os.path.join(os.path.dirname(temp_path.name),
                                    "%%!!!===++.jar")
            safe_concurrent_rename(temp_path.name, new_path)

            # Executing the new path should work.
            self.assertEqual(
                0,
                util.execute_java(
                    executor=executor,
                    classpath=[new_path],
                    main="coursier.echo.Echo",
                    args=["Hello World"],
                    create_synthetic_jar=True,
                ),
            )
Example #9
    def test_download_redirect(self):
        """Make sure that a server that returns a redirect is actually followed.

        Test with a real HTTP server that redirects from one URL to another.
        """

        fetcher = Fetcher('/unused/root/dir')
        with self.setup_server() as base_url:
            self._URL = base_url
            self.assertFalse(self._URL2_ACCESSED)
            self.assertFalse(self._URL1_ACCESSED)

            path = fetcher.download(base_url + '/url2')
            self.assertTrue(self._URL2_ACCESSED)
            self.assertTrue(self._URL1_ACCESSED)

            with open(path) as fp:
                self.assertEqual('returned from redirect\r\n', fp.read())
Example #10
    def fetch_prebuilt_wheels(self, binary_base_url, deploy_pants_wheels_path,
                              deploy_3rdparty_wheels_path, to_dir):
        wheel_paths = self.list_prebuilt_wheels(binary_base_url,
                                                deploy_pants_wheels_path,
                                                deploy_3rdparty_wheels_path)

        if not wheel_paths:
            raise ValueError("No wheels found.")

        # Fetch the wheels in parallel.
        # It is okay to have some interleaved output from the fetcher,
        # because we summarize the results at the end.
        fetcher = Fetcher(os.getcwd())
        checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
        futures = []
        with ThreadPoolExecutor(max_workers=8) as executor:
            for k in wheel_paths:
                file_path, url_path = k.split(self.OUTPUT_DELIMITER)
                dest = os.path.join(to_dir, file_path)
                safe_mkdir(os.path.dirname(dest))

                url = '{}/{}'.format(binary_base_url, url_path)
                future = executor.submit(self._download, fetcher, checksummer,
                                         url, dest)
                futures.append((future, url))

        # Summarize the fetch results.
        fail = False
        for future, url in futures:
            if future.exception() is not None:
                logger.error('Failed to download: {}'.format(url))
                fail = True
            else:
                logger.info('Downloaded: {}'.format(url))

        if fail:
            raise fetcher.Error()
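The example above submits each URL to a _download helper that is not shown on this page. A plausible sketch of such a helper, reusing only the fetcher calls demonstrated in the other examples, is given below; the body is an assumption, not the project's actual implementation.

    def _download(self, fetcher, checksummer, url, dest):
        # Hypothetical helper: fetch url into dest while feeding the progress and checksum listeners.
        fetcher.download(url,
                         listener=fetcher.ProgressListener().wrap(checksummer),
                         path_or_fd=dest)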
Example #11
    def _default_http_fetcher(cls):
        """Return a fetcher that resolves local file paths against the build root.

        Currently this is used everywhere except in testing.
        """
        return Fetcher(get_buildroot())
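Example #9 shows that download() returns the path of the fetched file when no path_or_fd argument is supplied. Combined with the default fetcher above, a minimal usage sketch looks like the following; the URL and the module import paths are assumptions.

from pants.base.build_environment import get_buildroot  # import path assumed
from pants.net.http.fetcher import Fetcher              # import path assumed

fetcher = Fetcher(get_buildroot())
# With no path_or_fd argument, download() returns the path of the fetched file.
path = fetcher.download('https://example.com/artifact.bin')  # placeholder URL
with open(path, 'rb') as fp:
    data = fp.read()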
Example #12
    def setUp(self):
        self.requests = mock.Mock(spec=requests.Session)
        self.response = mock.Mock(spec=requests.Response)
        self.fetcher = Fetcher('/unused/root/dir', requests_api=self.requests)
        self.listener = mock.create_autospec(Fetcher.Listener, spec_set=True)
Example #13
    def resolve_target(self,
                       node_task,
                       target,
                       results_dir,
                       node_paths,
                       resolve_locally=False,
                       **kwargs):
        if not resolve_locally:
            self._copy_sources(target, results_dir)

        with temporary_dir() as temp_dir:
            archive_file_name = urllib.parse.urlsplit(
                target.dependencies_archive_url).path.split("/")[-1]
            if not archive_file_name:
                raise TaskError(
                    "Could not determine archive file name for {target} from {url}"
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url))

            download_path = os.path.join(temp_dir, archive_file_name)

            node_task.context.log.info(
                "Downloading archive {archive_file_name} from "
                "{dependencies_archive_url} to {path}".format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path,
                ))

            try:
                Fetcher(get_buildroot()).download(
                    target.dependencies_archive_url,
                    listener=Fetcher.ProgressListener(),
                    path_or_fd=download_path,
                    timeout_secs=self.get_options().fetch_timeout_secs,
                )
            except Fetcher.Error as error:
                raise TaskError(
                    "Failed to fetch preinstalled node_modules for {target} from {url}: {error}"
                    .format(
                        target=target.address.reference(),
                        url=target.dependencies_archive_url,
                        error=error,
                    ))

            node_task.context.log.info(
                "Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}"
                .format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path,
                ))

            archiver_for_path(archive_file_name).extract(
                download_path, temp_dir)

            extracted_node_modules = os.path.join(temp_dir, "node_modules")
            if not os.path.isdir(extracted_node_modules):
                raise TaskError(
                    "Did not find an extracted node_modules directory for {target} "
                    "inside {dependencies_archive_url}".format(
                        target=target.address.reference(),
                        dependencies_archive_url=target.dependencies_archive_url,
                    ))

            # shutil.move doesn't handle directory collision nicely. This is mainly to address
            # installing within the source directory for local resolves.
            node_modules_path = os.path.join(results_dir, "node_modules")
            safe_rmtree(node_modules_path)
            shutil.move(extracted_node_modules, node_modules_path)