Example No. 1
    def test_java_compile_reads_resource_mapping(self):
        # Ensure that if an annotation processor produces a resource-mapping,
        # the artifact contains that resource mapping.

        with temporary_dir() as cache_dir:
            config = {"cache.compile.rsc": {"write_to": [cache_dir]}}

            self.assert_success(
                self.run_pants(
                    [
                        "compile",
                        "testprojects/src/java/org/pantsbuild/testproject/annotation/main",
                    ],
                    config=config,
                ))

            base_artifact_dir = self.get_cache_subdir(cache_dir)
            artifact_dir = os.path.join(
                base_artifact_dir,
                "testprojects.src.java.org.pantsbuild.testproject.annotation.main.main",
            )

            self.assertTrue(os.path.exists(artifact_dir))
            artifacts = os.listdir(artifact_dir)
            self.assertEqual(len(artifacts), 1)
            single_artifact = artifacts[0]

            with temporary_dir() as extract_dir:
                artifact_path = os.path.join(artifact_dir, single_artifact)
                archiver_for_path(artifact_path).extract(
                    artifact_path, extract_dir)
                all_files = set()
                for dirpath, dirs, files in safe_walk(extract_dir):
                    for name in files:
                        path = os.path.join(dirpath, name)
                        all_files.add(path)

                # Locate the report file on the classpath.
                report_file_name = "deprecation_report.txt"
                reports = [
                    f for f in all_files if f.endswith(report_file_name)
                ]
                self.assertEqual(
                    1,
                    len(reports),
                    "Expected exactly one {} file; got: {}".format(
                        report_file_name, all_files),
                )

                with open(reports[0], "r") as fp:
                    annotated_classes = [
                        line.rstrip() for line in fp.read().splitlines()
                    ]
                    self.assertEqual(
                        {
                            "org.pantsbuild.testproject.annotation.main.Main",
                            "org.pantsbuild.testproject.annotation.main.Main$TestInnerClass",
                        },
                        set(annotated_classes),
                    )
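
All of the snippets on this page hinge on `archiver_for_path`, which pattern-matches a file name's extension to pick an archiver exposing `extract(archive_path, dest)`, and raises `ValueError` for unrecognized extensions (Examples No. 5-7 below rely on exactly that). A minimal sketch of that contract, with hypothetical archiver classes rather than the real pants.fs.archive implementation:

import tarfile
import zipfile


class _ZipArchiver(object):
    """Hypothetical stand-in for the zip archiver returned by archiver_for_path."""
    extension = 'zip'

    def extract(self, archive_path, dest):
        with zipfile.ZipFile(archive_path) as zf:
            zf.extractall(dest)


class _TarArchiver(object):
    """Hypothetical stand-in for the tar-based archivers."""

    def __init__(self, mode, extension):
        self.mode = mode
        self.extension = extension

    def extract(self, archive_path, dest):
        with tarfile.open(archive_path, mode=self.mode) as tf:
            tf.extractall(dest)


_ARCHIVERS_BY_SUFFIX = [
    ('.tar.gz', _TarArchiver('r:gz', 'tar.gz')),
    ('.tgz', _TarArchiver('r:gz', 'tgz')),
    ('.tar.bz2', _TarArchiver('r:bz2', 'tar.bz2')),
    ('.zip', _ZipArchiver()),
]


def archiver_for_path(path):
    # Return the archiver for the first matching suffix; unknown extensions are an error.
    for suffix, archiver in _ARCHIVERS_BY_SUFFIX:
        if path.endswith(suffix):
            return archiver
    raise ValueError('No archiver registered for {}'.format(path))
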
Example No. 2

    def resolve_target(self, node_task, target, results_dir, node_paths):
        self._copy_sources(target, results_dir)

        with temporary_dir() as temp_dir:
            archive_file_name = urllib_parse.urlsplit(
                target.dependencies_archive_url).path.split('/')[-1]
            if not archive_file_name:
                raise TaskError(
                    'Could not determine archive file name for {target} from {url}'
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url))

            download_path = os.path.join(temp_dir, archive_file_name)

            logger.info(
                'Downloading archive {archive_file_name} from '
                '{dependencies_archive_url} to {path}'.format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path))

            try:
                with closing(
                        urllib_request.urlopen(target.dependencies_archive_url)
                ) as opened_archive_url:
                    with safe_open(download_path, 'wb') as downloaded_archive:
                        downloaded_archive.write(opened_archive_url.read())
            except (IOError, urllib_error.HTTPError, urllib_error.URLError,
                    ValueError) as error:
                raise TaskError(
                    'Failed to fetch preinstalled node_modules for {target} from '
                    '{dependencies_archive_url}: {error}'.format(
                        target=target.address.reference(),
                        dependencies_archive_url=target.dependencies_archive_url,
                        error=error))

            logger.info(
                'Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}'
                .format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path))

            archiver_for_path(archive_file_name).extract(
                download_path, temp_dir)

            extracted_node_modules = os.path.join(temp_dir, 'node_modules')
            if not os.path.isdir(extracted_node_modules):
                raise TaskError(
                    'Did not find an extracted node_modules directory for {target} '
                    'inside {dependencies_archive_url}'.format(
                        target=target.address.reference(),
                        dependencies_archive_url=target.dependencies_archive_url))

            shutil.move(extracted_node_modules,
                        os.path.join(results_dir, 'node_modules'))
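
Example No. 2 above buffers the whole response in memory via `opened_archive_url.read()` before writing it out. For large node_modules archives, a chunked copy is the usual alternative; a sketch using only the stdlib (not what pants does, which is the `Fetcher` shown in the next example):

import shutil
import urllib.request
from contextlib import closing


def download_archive(url, download_path, chunk_size=64 * 1024):
    # Stream the response to disk in fixed-size chunks instead of
    # holding the entire archive in memory at once.
    with closing(urllib.request.urlopen(url)) as response:
        with open(download_path, 'wb') as out:
            shutil.copyfileobj(response, out, chunk_size)
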
Example No. 3
    def resolve_target(self, node_task, target, results_dir, node_paths):
        self._copy_sources(target, results_dir)

        with temporary_dir() as temp_dir:
            archive_file_name = urllib_parse.urlsplit(
                target.dependencies_archive_url).path.split('/')[-1]
            if not archive_file_name:
                raise TaskError(
                    'Could not determine archive file name for {target} from {url}'
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url))

            download_path = os.path.join(temp_dir, archive_file_name)

            logger.info(
                'Downloading archive {archive_file_name} from '
                '{dependencies_archive_url} to {path}'.format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path))

            try:
                Fetcher(get_buildroot()).download(
                    target.dependencies_archive_url,
                    listener=Fetcher.ProgressListener(),
                    path_or_fd=download_path,
                    timeout_secs=self.get_options().fetch_timeout_secs)
            except Fetcher.Error as error:
                raise TaskError(
                    'Failed to fetch preinstalled node_modules for {target} from {url}: {error}'
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url,
                            error=error))

            logger.info(
                'Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}'
                .format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path))

            archiver_for_path(archive_file_name).extract(
                download_path, temp_dir)

            extracted_node_modules = os.path.join(temp_dir, 'node_modules')
            if not os.path.isdir(extracted_node_modules):
                raise TaskError(
                    'Did not find an extracted node_modules directory for {target} '
                    'inside {dependencies_archive_url}'.format(
                        target=target.address.reference(),
                        dependencies_archive_url=target.dependencies_archive_url))

            shutil.move(extracted_node_modules,
                        os.path.join(results_dir, 'node_modules'))
Example No. 4

  def resolve_target(self, node_task, target, results_dir, node_paths, resolve_locally=False, **kwargs):
    if not resolve_locally:
      self._copy_sources(target, results_dir)

    with temporary_dir() as temp_dir:
      archive_file_name = urllib_parse.urlsplit(target.dependencies_archive_url).path.split('/')[-1]
      if not archive_file_name:
        raise TaskError('Could not determine archive file name for {target} from {url}'
                        .format(target=target.address.reference(),
                                url=target.dependencies_archive_url))

      download_path = os.path.join(temp_dir, archive_file_name)

      node_task.context.log.info(
        'Downloading archive {archive_file_name} from '
        '{dependencies_archive_url} to {path}'
        .format(archive_file_name=archive_file_name,
                dependencies_archive_url=target.dependencies_archive_url,
                path=download_path))

      try:
        Fetcher(get_buildroot()).download(target.dependencies_archive_url,
                                          listener=Fetcher.ProgressListener(),
                                          path_or_fd=download_path,
                                          timeout_secs=self.get_options().fetch_timeout_secs)
      except Fetcher.Error as error:
        raise TaskError('Failed to fetch preinstalled node_modules for {target} from {url}: {error}'
                        .format(target=target.address.reference(),
                                url=target.dependencies_archive_url,
                                error=error))

      node_task.context.log.info(
        'Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}'
        .format(archive_file_name=archive_file_name,
                dependencies_archive_url=target.dependencies_archive_url,
                path=download_path))

      archiver_for_path(archive_file_name).extract(download_path, temp_dir)

      extracted_node_modules = os.path.join(temp_dir, 'node_modules')
      if not os.path.isdir(extracted_node_modules):
        raise TaskError('Did not find an extracted node_modules directory for {target} '
                        'inside {dependencies_archive_url}'
                        .format(target=target.address.reference(),
                                dependencies_archive_url=target.dependencies_archive_url))

      # shutil.move doesn't handle directory collision nicely. This is mainly to address
      # installing within the source directory for local resolves.
      node_modules_path = os.path.join(results_dir, 'node_modules')
      safe_rmtree(node_modules_path)
      shutil.move(extracted_node_modules, node_modules_path)
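
The comment before that final move is worth unpacking: when the destination directory already exists, `shutil.move` relocates the source *inside* it instead of replacing it, which is exactly wrong for a repeated local resolve. A small demonstration with throwaway paths:

import os
import shutil
import tempfile

root = tempfile.mkdtemp()
src = os.path.join(root, 'extracted_node_modules')
dst = os.path.join(root, 'node_modules')
os.makedirs(src)
os.makedirs(dst)  # Simulate node_modules left behind by an earlier resolve.

shutil.move(src, dst)
# The source was nested under the existing destination, not swapped in:
assert os.path.isdir(os.path.join(dst, 'extracted_node_modules'))

shutil.rmtree(root)

Clearing the destination first (`safe_rmtree` in the example) sidesteps the collision.
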
Example No. 5
def select(argv):
    # Parse positional arguments to the script.
    args = _create_bootstrap_binary_arg_parser().parse_args(argv[1:])
    # Resolve bootstrap options with a fake empty command line.
    options_bootstrapper = OptionsBootstrapper.create(args=[argv[0]])
    subsystems = (GlobalOptionsRegistrar, BinaryUtil.Factory)
    known_scope_infos = reduce(set.union,
                               (ss.known_scope_infos() for ss in subsystems),
                               set())
    options = options_bootstrapper.get_full_options(known_scope_infos)
    # Initialize Subsystems.
    Subsystem.set_options(options)

    # If the filename provided ends in a known archive extension (such as ".tar.gz"), then we get the
    # appropriate Archiver to pass to BinaryUtil.
    archiver_for_current_binary = None
    filename = args.filename or args.util_name
    try:
        archiver_for_current_binary = archiver_for_path(filename)
        # BinaryRequest requires the `name` field to be provided without an extension, as it appends the
        # archiver's extension if one is provided, so we have to remove it here.
        filename = filename[:-(len(archiver_for_current_binary.extension) + 1)]
    except ValueError:
        pass

    binary_util = BinaryUtil.Factory.create()
    binary_request = BinaryRequest(supportdir='bin/{}'.format(args.util_name),
                                   version=args.version,
                                   name=filename,
                                   platform_dependent=True,
                                   external_url_generator=None,
                                   archiver=archiver_for_current_binary)

    return binary_util.select(binary_request)
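
The slice `filename[:-(len(archiver_for_current_binary.extension) + 1)]` drops both the extension and the dot separating it from the name, since archiver extensions carry no leading dot (hence the `+ 1`). With hypothetical values:

filename = 'protoc.tar.gz'
extension = 'tar.gz'  # No leading dot, so the slice removes len(extension) + 1 characters.
assert filename[:-(len(extension) + 1)] == 'protoc'
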
Example No. 6
  def fetch(self, import_path, dest, rev=None):
    match, url_info = self._matcher(import_path)
    pkg = GoRemoteLibrary.remote_package_path(self.root(import_path), import_path)
    archive_url = match.expand(url_info.url_format).format(rev=url_info.rev(rev), pkg=pkg)
    try:
      archiver = archiver_for_path(archive_url)
    except ValueError:
      raise self.FetchError("Don't know how to unpack archive at url {}".format(archive_url))

    with self._fetch(archive_url) as archive:
      if url_info.strip_level == 0:
        archiver.extract(archive, dest)
      else:
        with temporary_dir() as scratch:
          archiver.extract(archive, scratch)
          for dirpath, dirnames, filenames in os.walk(scratch, topdown=True):
            if dirpath != scratch:
              relpath = os.path.relpath(dirpath, scratch)
              relpath_components = relpath.split(os.sep)
              if len(relpath_components) == url_info.strip_level and (dirnames or filenames):
                for path in dirnames + filenames:
                  src = os.path.join(dirpath, path)
                  dst = os.path.join(dest, path)
                  shutil.move(src, dst)
                del dirnames[:]  # Stops the walk.
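
Two details of that walk are easy to miss: with `topdown=True`, mutating `dirnames` in place prunes the traversal, and the move only fires at exactly `strip_level` directories below the scratch root. A self-contained rerun of the same idiom on a made-up layout:

import os
import shutil
import tempfile

scratch = tempfile.mkdtemp()
dest = tempfile.mkdtemp()
strip_level = 1

# Mimic an archive whose contents sit under a single wrapper directory.
os.makedirs(os.path.join(scratch, 'pkg-1.0', 'src'))
open(os.path.join(scratch, 'pkg-1.0', 'main.go'), 'w').close()

for dirpath, dirnames, filenames in os.walk(scratch, topdown=True):
    if dirpath != scratch:
        relpath_components = os.path.relpath(dirpath, scratch).split(os.sep)
        if len(relpath_components) == strip_level and (dirnames or filenames):
            for path in dirnames + filenames:
                shutil.move(os.path.join(dirpath, path), os.path.join(dest, path))
            # Emptying dirnames in place stops os.walk from descending further.
            del dirnames[:]

print(sorted(os.listdir(dest)))  # ['main.go', 'src'] -- the 'pkg-1.0' wrapper is stripped.
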
Example No. 7
    def fetch_archive(self, archive_url, strip_level, dest):
        try:
            archiver = archiver_for_path(archive_url)
        except ValueError:
            raise FetchError(
                f"Don't know how to unpack archive at url {archive_url}")

        with self._fetch(archive_url) as archive:
            if strip_level == 0:
                archiver.extract(archive, dest)
            else:
                with temporary_dir() as scratch:
                    archiver.extract(archive, scratch)
                    for dirpath, dirnames, filenames in os.walk(scratch,
                                                                topdown=True):
                        if dirpath != scratch:
                            relpath = os.path.relpath(dirpath, scratch)
                            relpath_components = relpath.split(os.sep)
                            if len(relpath_components) == strip_level and (
                                    dirnames or filenames):
                                for path in dirnames + filenames:
                                    src = os.path.join(dirpath, path)
                                    dst = os.path.join(dest, path)
                                    shutil.move(src, dst)
                                del dirnames[:]  # Stops the walk.
Example No. 8
def select(argv):
  # Parse positional arguments to the script.
  args = _create_bootstrap_binary_arg_parser().parse_args(argv[1:])
  # Resolve bootstrap options with a fake empty command line.
  options_bootstrapper = OptionsBootstrapper.create(args=[argv[0]])
  subsystems = (GlobalOptionsRegistrar, BinaryUtil.Factory)
  known_scope_infos = reduce(set.union, (ss.known_scope_infos() for ss in subsystems), set())
  options = options_bootstrapper.get_full_options(known_scope_infos)
  # Initialize Subsystems.
  Subsystem.set_options(options)

  # If the filename provided ends in a known archive extension (such as ".tar.gz"), then we get the
  # appropriate Archiver to pass to BinaryUtil.
  archiver_for_current_binary = None
  filename = args.filename or args.util_name
  try:
    archiver_for_current_binary = archiver_for_path(filename)
    # BinaryRequest requires the `name` field to be provided without an extension, as it appends the
    # archiver's extension if one is provided, so we have to remove it here.
    filename = filename[:-(len(archiver_for_current_binary.extension) + 1)]
  except ValueError:
    pass

  binary_util = BinaryUtil.Factory.create()
  binary_request = BinaryRequest(
    supportdir='bin/{}'.format(args.util_name),
    version=args.version,
    name=filename,
    platform_dependent=True,
    external_url_generator=None,
    archiver=archiver_for_current_binary)

  return binary_util.select(binary_request)
Example No. 9

  def test_java_compile_reads_resource_mapping(self):
    # Ensure that if an annotation processor produces a resource-mapping,
    # the artifact contains that resource mapping.

    with temporary_dir() as cache_dir:
      config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

      self.assert_success(self.run_pants([
        'compile',
        'testprojects/src/java/org/pantsbuild/testproject/annotation/main',
      ], config=config))

      base_artifact_dir = self.get_cache_subdir(cache_dir)
      artifact_dir = os.path.join(
        base_artifact_dir,
        'testprojects.src.java.org.pantsbuild.testproject.annotation.main.main',
      )

      self.assertTrue(os.path.exists(artifact_dir))
      artifacts = os.listdir(artifact_dir)
      self.assertEqual(len(artifacts), 1)
      single_artifact = artifacts[0]

      with temporary_dir() as extract_dir:
        artifact_path = os.path.join(artifact_dir, single_artifact)
        archiver_for_path(artifact_path).extract(artifact_path, extract_dir)
        all_files = set()
        for dirpath, dirs, files in safe_walk(extract_dir):
          for name in files:
            path = os.path.join(dirpath, name)
            all_files.add(path)

        # Locate the report file on the classpath.
        report_file_name = 'deprecation_report.txt'
        reports = [f for f in all_files if f.endswith(report_file_name)]
        self.assertEqual(1, len(reports),
                          'Expected exactly one {} file; got: {}'.format(report_file_name,
                                                                         all_files))

        with open(reports[0], 'r') as fp:
          annotated_classes = [line.rstrip() for line in fp.read().splitlines()]
          self.assertEqual(
            {'org.pantsbuild.testproject.annotation.main.Main',
             'org.pantsbuild.testproject.annotation.main.Main$TestInnerClass'},
            set(annotated_classes))
Example No. 10

  def resolve_target(self, node_task, target, results_dir, node_paths):
    self._copy_sources(target, results_dir)

    with temporary_dir() as temp_dir:
      archive_file_name = urllib_parse.urlsplit(target.dependencies_archive_url).path.split('/')[-1]
      if not archive_file_name:
        raise TaskError('Could not determine archive file name for {target} from {url}'
                        .format(target=target.address.reference(),
                                url=target.dependencies_archive_url))

      download_path = os.path.join(temp_dir, archive_file_name)

      logger.info('Downloading archive {archive_file_name} from '
                  '{dependencies_archive_url} to {path}'
                  .format(archive_file_name=archive_file_name,
                          dependencies_archive_url=target.dependencies_archive_url,
                          path=download_path))

      try:
        Fetcher().download(target.dependencies_archive_url,
                           listener=Fetcher.ProgressListener(),
                           path_or_fd=download_path,
                           timeout_secs=self.get_options().fetch_timeout_secs)
      except Fetcher.Error as error:
        raise TaskError('Failed to fetch preinstalled node_modules for {target} from {url}: {error}'
                        .format(target=target.address.reference(),
                                url=target.dependencies_archive_url,
                                error=error))

      logger.info('Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}'
                  .format(archive_file_name=archive_file_name,
                          dependencies_archive_url=target.dependencies_archive_url,
                          path=download_path))

      archiver_for_path(archive_file_name).extract(download_path, temp_dir)

      extracted_node_modules = os.path.join(temp_dir, 'node_modules')
      if not os.path.isdir(extracted_node_modules):
        raise TaskError('Did not find an extracted node_modules directory for {target} '
                        'inside {dependencies_archive_url}'
                        .format(target=target.address.reference(),
                                dependencies_archive_url=target.dependencies_archive_url))

      shutil.move(extracted_node_modules, os.path.join(results_dir, 'node_modules'))
Example No. 11

  def _extract_archive(self, archive_path):
    with temporary_dir() as temp_dir:
      _, extension = os.path.splitext(archive_path)
      print(extension)
      if extension == '.jar':
        extraction_archiver = archiver('zip')
      else:
        extraction_archiver = archiver_for_path(os.path.basename(archive_path))
      extraction_archiver.extract(archive_path, temp_dir)
      yield temp_dir
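
The `.jar` special case exists because `archiver_for_path` does not recognize that extension, yet a JAR is an ordinary ZIP container, so the zip archiver handles it. The stdlib equivalent of that fallback, with a hypothetical path argument:

import zipfile


def extract_jar(jar_path, dest):
    # JARs use the ZIP container format, so zipfile unpacks them directly.
    with zipfile.ZipFile(jar_path) as zf:
        zf.extractall(dest)
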
Example No. 12

  def resolve_target(self, node_task, target, results_dir, node_paths):
    self._copy_sources(target, results_dir)

    with temporary_dir() as temp_dir:
      archive_file_name = urllib_parse.urlsplit(target.dependencies_archive_url).path.split('/')[-1]
      if not archive_file_name:
        raise TaskError('Could not determine archive file name for {target} from {url}'
                        .format(target=target.address.reference(),
                                url=target.dependencies_archive_url))

      download_path = os.path.join(temp_dir, archive_file_name)

      logger.info('Downloading archive {archive_file_name} from '
                  '{dependencies_archive_url} to {path}'
                  .format(archive_file_name=archive_file_name,
                          dependencies_archive_url=target.dependencies_archive_url,
                          path=download_path))

      try:
        with closing(urllib_request.urlopen(target.dependencies_archive_url)) as opened_archive_url:
          with safe_open(download_path, 'wb') as downloaded_archive:
            downloaded_archive.write(opened_archive_url.read())
      except (IOError, urllib_error.HTTPError, urllib_error.URLError, ValueError) as error:
        raise TaskError('Failed to fetch preinstalled node_modules for {target} from '
                        '{dependencies_archive_url}: {error}'
                        .format(target=target.address.reference(),
                                dependencies_archive_url=target.dependencies_archive_url, error=error))

      logger.info('Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}'
                  .format(archive_file_name=archive_file_name,
                          dependencies_archive_url=target.dependencies_archive_url,
                          path=download_path))

      archiver_for_path(archive_file_name).extract(download_path, temp_dir)

      extracted_node_modules = os.path.join(temp_dir, 'node_modules')
      if not os.path.isdir(extracted_node_modules):
        raise TaskError('Did not find an extracted node_modules directory for {target} '
                        'inside {dependencies_archive_url}'
                        .format(target=target.address.reference(),
                                dependencies_archive_url=target.dependencies_archive_url))

      shutil.move(extracted_node_modules, os.path.join(results_dir, 'node_modules'))
Example No. 13

  def _extract_archive(self, archive_path):
    with temporary_dir() as temp_dir:
      _, extension = os.path.splitext(archive_path)
      print(extension)
      if extension == '.jar':
        extraction_archiver = create_archiver('zip')
      else:
        extraction_archiver = archiver_for_path(os.path.basename(archive_path))
      extraction_archiver.extract(archive_path, temp_dir)
      yield temp_dir
Example No. 14
    def _construct_path(self, context=None):
        fetched = self.select(context)
        if not self._extract:
            return fetched
        unpacked_dir = os.path.dirname(fetched)
        outdir = os.path.join(unpacked_dir, 'unpacked')
        if not os.path.exists(outdir):
            with temporary_dir(root_dir=unpacked_dir) as tmp_root:
                # This is an upstream lever that pattern matches the filepath to an archive type.
                archiver = archiver_for_path(fetched)
                archiver.extract(fetched, tmp_root)
                os.rename(tmp_root, outdir)
        return os.path.join(outdir)
Example No. 15
  def _construct_path(self, context=None):
    fetched = self.select(context)
    if not self._extract:
      return fetched
    unpacked_dir = os.path.dirname(fetched)
    outdir = os.path.join(unpacked_dir, 'unpacked')
    if not os.path.exists(outdir):
      with temporary_dir(root_dir=unpacked_dir) as tmp_root:
        # This is an upstream lever that pattern matches the filepath to an archive type.
        archiver = archiver_for_path(fetched)
        archiver.extract(fetched, tmp_root)
        os.rename(tmp_root, outdir)
    return os.path.join(outdir)
Example No. 16
  def _construct_path(self):
    fetched = self.remote_source_util.select_binary(self._relpath, self.version, self._filename)
    if not self._extract:
      return fetched
    unpacked_dir = os.path.dirname(fetched)
    outdir = os.path.join(unpacked_dir, 'unpacked')
    if not os.path.exists(outdir):
      with temporary_dir(root_dir=unpacked_dir) as tmp_root:
        # This is an upstream lever that pattern matches the filepath to an archive type.
        archiver = archiver_for_path(fetched)
        archiver.extract(fetched, tmp_root)
        os.rename(tmp_root, outdir)
    return os.path.join(outdir)
Example No. 17
  def fetch_archive(self, archive_url, strip_level, dest):
    try:
      archiver = archiver_for_path(archive_url)
    except ValueError:
      raise FetchError("Don't know how to unpack archive at url {}".format(archive_url))

    with self._fetch(archive_url) as archive:
      if strip_level == 0:
        archiver.extract(archive, dest)
      else:
        with temporary_dir() as scratch:
          archiver.extract(archive, scratch)
          for dirpath, dirnames, filenames in os.walk(scratch, topdown=True):
            if dirpath != scratch:
              relpath = os.path.relpath(dirpath, scratch)
              relpath_components = relpath.split(os.sep)
              if len(relpath_components) == strip_level and (dirnames or filenames):
                for path in dirnames + filenames:
                  src = os.path.join(dirpath, path)
                  dst = os.path.join(dest, path)
                  shutil.move(src, dst)
                del dirnames[:]  # Stops the walk.
Example No. 18
    def resolve_target(self,
                       node_task,
                       target,
                       results_dir,
                       node_paths,
                       resolve_locally=False,
                       **kwargs):
        if not resolve_locally:
            self._copy_sources(target, results_dir)

        with temporary_dir() as temp_dir:
            archive_file_name = urllib.parse.urlsplit(
                target.dependencies_archive_url).path.split("/")[-1]
            if not archive_file_name:
                raise TaskError(
                    "Could not determine archive file name for {target} from {url}"
                    .format(target=target.address.reference(),
                            url=target.dependencies_archive_url))

            download_path = os.path.join(temp_dir, archive_file_name)

            node_task.context.log.info(
                "Downloading archive {archive_file_name} from "
                "{dependencies_archive_url} to {path}".format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path,
                ))

            try:
                Fetcher(get_buildroot()).download(
                    target.dependencies_archive_url,
                    listener=Fetcher.ProgressListener(),
                    path_or_fd=download_path,
                    timeout_secs=self.get_options().fetch_timeout_secs,
                )
            except Fetcher.Error as error:
                raise TaskError(
                    "Failed to fetch preinstalled node_modules for {target} from {url}: {error}"
                    .format(
                        target=target.address.reference(),
                        url=target.dependencies_archive_url,
                        error=error,
                    ))

            node_task.context.log.info(
                "Fetched archive {archive_file_name} from {dependencies_archive_url} to {path}"
                .format(
                    archive_file_name=archive_file_name,
                    dependencies_archive_url=target.dependencies_archive_url,
                    path=download_path,
                ))

            archiver_for_path(archive_file_name).extract(
                download_path, temp_dir)

            extracted_node_modules = os.path.join(temp_dir, "node_modules")
            if not os.path.isdir(extracted_node_modules):
                raise TaskError(
                    "Did not find an extracted node_modules directory for {target} "
                    "inside {dependencies_archive_url}".format(
                        target=target.address.reference(),
                        dependencies_archive_url=target.dependencies_archive_url,
                    ))

            # shutil.move doesn't handle directory collision nicely. This is mainly to address
            # installing within the source directory for local resolves.
            node_modules_path = os.path.join(results_dir, "node_modules")
            safe_rmtree(node_modules_path)
            shutil.move(extracted_node_modules, node_modules_path)