Example 1
 def test_atomic_copy(self):
   with temporary_file() as src:
     src.write(src.name)
     src.flush()
     with temporary_file() as dst:
       atomic_copy(src.name, dst.name)
       dst.close()
       with open(dst.name) as new_dst:
         self.assertEquals(src.name, new_dst.read())
Example 2
  def test_create_bootstrapped_multiple_config_override(self):
    # Check that with multiple config files, the later values take precedence;
    # in this case worker_count is overwritten, while fruit stays the same.
    with temporary_file() as fp:
      fp.write(dedent("""
      [compile.apt]
      worker_count: 1

      [fruit]
      apple: red
      """))
      fp.close()

      args = ['--config-override={}'.format(fp.name)] + self._config_path(fp.name)
      bootstrapper_single_config = OptionsBootstrapper(args=args)

      opts_single_config = bootstrapper_single_config.get_full_options(known_scope_infos=[
          ScopeInfo('', ScopeInfo.GLOBAL),
          ScopeInfo('compile.apt', ScopeInfo.TASK),
          ScopeInfo('fruit', ScopeInfo.TASK),
      ])
      # So we don't choke on these on the cmd line.
      opts_single_config.register('', '--pants-config-files')
      opts_single_config.register('', '--config-override', type=list)

      opts_single_config.register('compile.apt', '--worker-count')
      opts_single_config.register('fruit', '--apple')

      self.assertEquals('1', opts_single_config.for_scope('compile.apt').worker_count)
      self.assertEquals('red', opts_single_config.for_scope('fruit').apple)

      with temporary_file() as fp2:
        fp2.write(dedent("""
        [compile.apt]
        worker_count: 2
        """))
        fp2.close()

        args = ['--config-override={}'.format(fp.name),
                '--config-override={}'.format(fp2.name)] + self._config_path(fp.name)

        bootstrapper_double_config = OptionsBootstrapper(args=args)

        opts_double_config = bootstrapper_double_config.get_full_options(known_scope_infos=[
          ScopeInfo('', ScopeInfo.GLOBAL),
          ScopeInfo('compile.apt', ScopeInfo.TASK),
          ScopeInfo('fruit', ScopeInfo.TASK),
        ])
        # So we don't choke on these on the cmd line.
        opts_double_config.register('', '--pants-config-files')
        opts_double_config.register('', '--config-override', type=list)
        opts_double_config.register('compile.apt', '--worker-count')
        opts_double_config.register('fruit', '--apple')

        self.assertEquals('2', opts_double_config.for_scope('compile.apt').worker_count)
        self.assertEquals('red', opts_double_config.for_scope('fruit').apple)
Example 3
 def test_atomic_copy(self):
   with temporary_file() as src:
     src.write(src.name.encode('utf-8'))
     src.flush()
     with temporary_file() as dst:
       atomic_copy(src.name, dst.name)
       dst.close()
       with open(dst.name) as new_dst:
         self.assertEquals(src.name, new_dst.read())
       self.assertEqual(os.stat(src.name).st_mode, os.stat(dst.name).st_mode)
Example 4
  def test_create_bootstrapped_multiple_config_override(self):
    # Check that with multiple config files, the later values take precedence;
    # in this case strategy is overwritten, while fruit stays the same.
    with temporary_file() as fp:
      fp.write(dedent("""
      [compile.apt]
      strategy: global

      [fruit]
      apple: red
      """))
      fp.close()

      bootstrapper_single_config = OptionsBootstrapper(configpath=fp.name,
                                                       args=['--config-override={}'.format(fp.name)])

      opts_single_config = bootstrapper_single_config.get_full_options(known_scope_infos=[
          ScopeInfo('', ScopeInfo.GLOBAL),
          ScopeInfo('compile.apt', ScopeInfo.TASK),
          ScopeInfo('fruit', ScopeInfo.TASK),
      ])
      opts_single_config.register('', '--config-override')  # So we don't choke on it on the cmd line.
      opts_single_config.register('compile.apt', '--strategy')
      opts_single_config.register('fruit', '--apple')

      self.assertEquals('global', opts_single_config.for_scope('compile.apt').strategy)
      self.assertEquals('red', opts_single_config.for_scope('fruit').apple)

      with temporary_file() as fp2:
        fp2.write(dedent("""
        [compile.apt]
        strategy: isolated
        """))
        fp2.close()

        bootstrapper_double_config = OptionsBootstrapper(
            configpath=fp.name,
            args=['--config-override={}'.format(fp.name),
                  '--config-override={}'.format(fp2.name)])

        opts_double_config = bootstrapper_double_config.get_full_options(known_scope_infos=[
          ScopeInfo('', ScopeInfo.GLOBAL),
          ScopeInfo('compile.apt', ScopeInfo.TASK),
          ScopeInfo('fruit', ScopeInfo.TASK),
        ])
        opts_double_config.register('', '--config-override')  # So we don't choke on it on the cmd line.
        opts_double_config.register('compile.apt', '--strategy')
        opts_double_config.register('fruit', '--apple')

        self.assertEquals('isolated', opts_double_config.for_scope('compile.apt').strategy)
        self.assertEquals('red', opts_double_config.for_scope('fruit').apple)
Example 5
  def setUp(self):
    self.ini1_content = textwrap.dedent(
      """
      [DEFAULT]
      name: foo
      answer: 42
      scale: 1.2
      path: /a/b/%(answer)s
      embed: %(path)s::foo
      disclaimer:
        Let it be known
        that.
      blank_section:

      [a]
      list: [1, 2, 3, %(answer)s]
      listappend: +[7, 8, 9]

      [b]
      preempt: True
      dict: {
          'a': 1,
          'b': %(answer)s,
          'c': ['%(answer)s', %(answer)s]
        }
      """
    )

    self.ini2_content = textwrap.dedent(
      """
      [a]
      fast: True

      [b]
      preempt: False

      [defined_section]
      """
    )

    with temporary_file(binary_mode=False) as ini1:
      ini1.write(self.ini1_content)
      ini1.close()

      with temporary_file(binary_mode=False) as ini2:
        ini2.write(self.ini2_content)
        ini2.close()
        self.config = Config.load(config_paths=[ini1.name, ini2.name])
        self.assertEqual([ini1.name, ini2.name], self.config.sources())
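The two ini files above exercise ConfigParser-style interpolation (%(answer)s) and multi-file precedence. A hedged sketch of assertions that could follow this setUp, assuming the legacy Config.get(section, option) accessor:

  def test_interpolation(self):
    # Hedged sketch: Config.get(section, option) is an assumption.
    # %(answer)s is expanded from [DEFAULT] at read time.
    self.assertEqual('/a/b/42', self.config.get('a', 'path'))
    # ini2 is loaded after ini1, so its value for [b] preempt wins.
    self.assertEqual('False', self.config.get('b', 'preempt'))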
Example 6
  def test_override_single_variable(self):
    with temporary_file() as output:
      # test that the override takes place
      with environment_as(HORK='BORK'):
        subprocess.Popen([sys.executable, '-c', 'import os; print(os.environ["HORK"])'],
                         stdout=output).wait()
        output.seek(0)
        self.assertEquals('BORK\n', output.read())

      # test that the variable is cleared
      with temporary_file() as new_output:
        subprocess.Popen([sys.executable, '-c', 'import os; print("HORK" in os.environ)'],
                         stdout=new_output).wait()
        new_output.seek(0)
        self.assertEquals('False\n', new_output.read())
Example 7
  def test_hardlink_or_copy_cross_device_should_copy(self):
    content = b'hello'

    # Mock os.link to throw a cross-device (EXDEV) error.
    with mock.patch('os.link') as os_mock:
      err = OSError()
      err.errno = errno.EXDEV
      os_mock.side_effect = err

      with temporary_dir() as src_dir, temporary_file() as dst:
        dst.write(content)
        dst.close()

        src_path = os.path.join(src_dir, 'src')

        safe_hardlink_or_copy(dst.name, src_path)

        with open(src_path, 'rb') as f:
          self.assertEqual(content, f.read())

        # Make sure it's not a symlink.
        self.assertFalse(os.path.islink(dst.name))

        # Make sure they are separate copies
        self.assertFalse(self._is_hard_link(dst.name, src_path))
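The test above patches os.link to raise EXDEV and asserts that safe_hardlink_or_copy degrades to a plain copy. A minimal sketch of that fallback pattern; the actual pants implementation may differ in details:

import errno
import os
import shutil

def hardlink_or_copy(src, dst):
  # Sketch of the pattern under test, not the real safe_hardlink_or_copy.
  try:
    os.link(src, dst)
  except OSError as e:
    if e.errno == errno.EXDEV:
      # src and dst live on different filesystems, so hard links are
      # impossible; make an independent copy instead.
      shutil.copyfile(src, dst)
    else:
      raise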
Example 8
 def setup_test_file(self, parent):
   with temporary_file(parent) as f:
     # Write the file.
     f.write(TEST_CONTENT1)
     path = f.name
     f.close()
     yield path
Example 9
  def nsutil_pex(self):
    interpreter = self.context.products.get_data(PythonInterpreter)
    chroot = os.path.join(self.workdir, 'nsutil', interpreter.version_string)
    if not os.path.exists(chroot):
      pex_info = PexInfo.default(interpreter=interpreter)
      with safe_concurrent_creation(chroot) as scratch:
        builder = PEXBuilder(path=scratch, interpreter=interpreter, pex_info=pex_info, copy=True)
        with temporary_file(binary_mode=False) as fp:
          declares_namespace_package_code = inspect.getsource(declares_namespace_package)
          fp.write(textwrap.dedent("""
            import sys


            {declares_namespace_package_code}


            if __name__ == '__main__':
              for path in sys.argv[1:]:
                if declares_namespace_package(path):
                  print(path)
          """).strip().format(declares_namespace_package_code=declares_namespace_package_code))
          fp.close()
          builder.set_executable(filename=fp.name, env_filename='main.py')
          builder.freeze()
    return PEX(pex=chroot, interpreter=interpreter)
Example 10
  def _bootstrap_ivy(self, bootstrap_jar_path):
    options = self._ivy_subsystem.get_options()
    if not os.path.exists(bootstrap_jar_path):
      with temporary_file() as bootstrap_jar:
        fetcher = Fetcher()
        checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
        try:
          logger.info('\nDownloading {}'.format(options.bootstrap_jar_url))
          # TODO: Capture the stdout of the fetcher, instead of letting it output
          # to the console directly.
          fetcher.download(options.bootstrap_jar_url,
                           listener=fetcher.ProgressListener().wrap(checksummer),
                           path_or_fd=bootstrap_jar,
                           timeout_secs=options.bootstrap_fetch_timeout_secs)
          logger.info('sha1: {}'.format(checksummer.checksum))
          bootstrap_jar.close()
          touch(bootstrap_jar_path)
          shutil.move(bootstrap_jar.name, bootstrap_jar_path)
        except fetcher.Error as e:
          raise self.Error('Problem fetching the ivy bootstrap jar! {}'.format(e))

    return Ivy(bootstrap_jar_path,
               ivy_settings=options.bootstrap_ivy_settings or options.ivy_settings,
               ivy_cache_dir=options.cache_dir,
               extra_jvm_options=self._ivy_subsystem.extra_jvm_options())
Example 11
  def load_coverage_data_for(self, context, covered_path, expect_coverage=True):
    data_file = self.coverage_data_file()
    self.assertEqual(expect_coverage, os.path.isfile(data_file))
    if expect_coverage:
      python_sources = context.products.get_data(GatherSources.PYTHON_SOURCES)
      covered_relpath = os.path.relpath(covered_path, self.build_root)
      owning_targets = [t for t in context.targets()
                        if covered_relpath in t.sources_relative_to_buildroot()]
      self.assertEqual(1, len(owning_targets))
      owning_target = owning_targets[0]

      src_chroot_path = python_sources.path()
      src_root_abspath = os.path.join(self.build_root, owning_target.target_base)
      covered_src_root_relpath = os.path.relpath(covered_path, src_root_abspath)
      chroot_path = os.path.join(src_chroot_path, covered_src_root_relpath)

      cp = configparser.SafeConfigParser()
      src_to_target_base = {src: tgt.target_base
                            for tgt in context.targets()
                            for src in tgt.sources_relative_to_source_root()}

      PytestRun._add_plugin_config(cp,
                                   src_chroot_path=src_chroot_path,
                                   src_to_target_base=src_to_target_base)
      with temporary_file(binary_mode=False) as fp:
        cp.write(fp)
        fp.close()

        coverage_data = coverage.coverage(config_file=fp.name, data_file=data_file)
        coverage_data.load()

      _, all_statements, not_run_statements, _ = coverage_data.analysis(chroot_path)
      return all_statements, not_run_statements
Example 12
  def _select_binary_stream(self, name, urls):
    """Download a file from a list of urls, yielding a stream after downloading the file.

    URLs are tried in order until they succeed.

    :raises: :class:`BinaryToolFetcher.BinaryNotFound` if requests to all the given urls fail.
    """
    downloaded_successfully = False
    accumulated_errors = []
    for url in OrderedSet(urls):  # De-dup URLS: we only want to try each URL once.
      logger.info('Attempting to fetch {name} binary from: {url} ...'.format(name=name, url=url))
      try:
        with temporary_file() as dest:
          logger.debug("in BinaryToolFetcher: url={}, timeout_secs={}"
                       .format(url, self._timeout_secs))
          self._fetcher.download(url,
                                 listener=Fetcher.ProgressListener(),
                                 path_or_fd=dest,
                                 timeout_secs=self._timeout_secs)
          logger.info('Fetched {name} binary from: {url} .'.format(name=name, url=url))
          downloaded_successfully = True
          dest.seek(0)
          yield dest
          break
      except (IOError, Fetcher.Error, ValueError) as e:
        accumulated_errors.append('Failed to fetch binary from {url}: {error}'
                                  .format(url=url, error=e))
    if not downloaded_successfully:
      raise self.BinaryNotFound(name, accumulated_errors)
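Because _select_binary_stream is a generator that yields the open download at most once, a caller consumes it with a for loop. A hypothetical consumer sketch; the fetcher object and names are assumptions, not the actual pants call site:

def fetch_to_path(fetcher, name, urls, dest_path):
  # Hypothetical caller for illustration only.
  for stream in fetcher._select_binary_stream(name, urls):
    # The generator yields the downloaded temp file seeked to 0; persist
    # its contents before the temporary file is cleaned up.
    with open(dest_path, 'wb') as out:
      out.write(stream.read())
  return dest_path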
Example 13
    def test_create_bootstrapped_options(self):
        # Check that we can set a bootstrap option from a cmd-line flag and have that interpolate
        # correctly into regular config.
        with temporary_file() as fp:
            fp.write(
                dedent(
                    """
      [foo]
      bar: %(pants_workdir)s/baz

      [fruit]
      apple: %(pants_supportdir)s/banana
      """
                )
            )
            fp.close()
            args = ["--pants-workdir=/qux"] + self._config_path(fp.name)
            bootstrapper = OptionsBootstrapper(env={"PANTS_SUPPORTDIR": "/pear"}, args=args)
            opts = bootstrapper.get_full_options(
                known_scope_infos=[
                    ScopeInfo("", ScopeInfo.GLOBAL),
                    ScopeInfo("foo", ScopeInfo.TASK),
                    ScopeInfo("fruit", ScopeInfo.TASK),
                ]
            )
            # So we don't choke on these on the cmd line.
            opts.register("", "--pants-workdir")
            opts.register("", "--pants-config-files")

            opts.register("foo", "--bar")
            opts.register("fruit", "--apple")
        self.assertEquals("/qux/baz", opts.for_scope("foo").bar)
        self.assertEquals("/pear/banana", opts.for_scope("fruit").apple)
Example 14
  def cov_setup(targets):
    cp = generate_coverage_config(targets)
    with temporary_file(cleanup=False) as fp:
      cp.write(fp)
      filename = fp.name

    def compute_coverage_modules(target):
      if target.coverage:
        return target.coverage
      else:
        # This technically assumes that tests/python/<target> will be testing
        # src/python/<target>.  For honest measurements, do target.walk() here instead;
        # however, this results in useless and noisy coverage reports.
        # Note in particular that this doesn't work for pants's own tests, as those are under
        # the top level package 'pants_tests', rather than just 'pants'.
        return set(os.path.dirname(source).replace(os.sep, '.')
                   for source in target.sources_relative_to_source_root())

    coverage_modules = set(itertools.chain(*[compute_coverage_modules(t) for t in targets]))
    args = ['-p', 'pytest_cov',
            '--cov-config', filename,
            '--cov-report', 'html',
            '--cov-report', 'term']
    for module in coverage_modules:
      args.extend(['--cov', module])
    return filename, args
Example 15
  def test_create_bootstrapped_options(self):
    # Check that we can set a bootstrap option from a cmd-line flag and have that interpolate
    # correctly into regular config.
    with temporary_file() as fp:
      fp.write(dedent("""
      [foo]
      bar: %(pants_workdir)s/baz

      [fruit]
      apple: %(pants_supportdir)s/banana
      """))
      fp.close()
      bootstrapper = OptionsBootstrapper(env={
                                           'PANTS_SUPPORTDIR': '/pear'
                                         },
                                         configpath=fp.name,
                                         args=['--pants-workdir=/qux'])
      opts = bootstrapper.get_full_options(known_scope_infos=[
        ScopeInfo('', ScopeInfo.GLOBAL),
        ScopeInfo('foo', ScopeInfo.TASK),
        ScopeInfo('fruit', ScopeInfo.TASK)
      ])
      opts.register('', '--pants-workdir')  # So we don't choke on it on the cmd line.
      opts.register('foo', '--bar')
      opts.register('fruit', '--apple')
    self.assertEquals('/qux/baz', opts.for_scope('foo').bar)
    self.assertEquals('/pear/banana', opts.for_scope('fruit').apple)
Example 16
  def test_combined_cache(self):
    """Make sure that the combined cache finds what it should and that it backfills."""
    httpd = None
    httpd_thread = None
    try:
      with temporary_dir() as http_root:
        with temporary_dir() as cache_root:
          with pushd(http_root):  # SimpleRESTHandler serves from the cwd.
            httpd = SocketServer.TCPServer(('localhost', 0), SimpleRESTHandler)
            port = httpd.server_address[1]
            httpd_thread = Thread(target=httpd.serve_forever)
            httpd_thread.start()
            with temporary_dir() as artifact_root:
              local = LocalArtifactCache(None, artifact_root, cache_root)
              remote = RESTfulArtifactCache(MockLogger(), artifact_root, 'http://localhost:%d' % port)
              combined = CombinedArtifactCache([local, remote])

              key = CacheKey('muppet_key', 'fake_hash', 42)

              with temporary_file(artifact_root) as f:
                # Write the file.
                f.write(TEST_CONTENT1)
                path = f.name
                f.close()

                # No cache has key.
                self.assertFalse(local.has(key))
                self.assertFalse(remote.has(key))
                self.assertFalse(combined.has(key))

                # No cache returns key.
                self.assertFalse(bool(local.use_cached_files(key)))
                self.assertFalse(bool(remote.use_cached_files(key)))
                self.assertFalse(bool(combined.use_cached_files(key)))

                # Attempting to use key that no cache had should not change anything.
                self.assertFalse(local.has(key))
                self.assertFalse(remote.has(key))
                self.assertFalse(combined.has(key))

                # Add to only remote cache.
                remote.insert(key, [path])

                self.assertFalse(local.has(key))
                self.assertTrue(remote.has(key))
                self.assertTrue(combined.has(key))

                # Successfully using via remote should NOT change local.
                self.assertTrue(bool(remote.use_cached_files(key)))
                self.assertFalse(local.has(key))

                # Successfully using via combined SHOULD backfill local.
                self.assertTrue(bool(combined.use_cached_files(key)))
                self.assertTrue(local.has(key))
                self.assertTrue(bool(local.use_cached_files(key)))
    finally:
      if httpd:
        httpd.shutdown()
      if httpd_thread:
        httpd_thread.join()
Example 17
 def _exercise_module(self, pex, expected_module):
   with temporary_file() as f:
     f.write('import {m}; print({m}.__file__)'.format(m=expected_module))
     f.close()
     proc = pex.run(args=[f.name], blocking=False,
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     return proc.communicate()
Example 18
  def do_test_artifact_cache(self, artifact_cache):
    key = CacheKey('muppet_key', 'fake_hash', 42)
    with temporary_file(artifact_cache.artifact_root) as f:
      # Write the file.
      f.write(TEST_CONTENT1)
      path = f.name
      f.close()

      # Cache it.
      self.assertFalse(artifact_cache.has(key))
      self.assertFalse(bool(artifact_cache.use_cached_files(key)))
      artifact_cache.insert(key, [path])
      self.assertTrue(artifact_cache.has(key))

      # Stomp it.
      with open(path, 'w') as outfile:
        outfile.write(TEST_CONTENT2)

      # Recover it from the cache.
      self.assertTrue(bool(artifact_cache.use_cached_files(key)))

      # Check that it was recovered correctly.
      with open(path, 'r') as infile:
        content = infile.read()
      self.assertEquals(content, TEST_CONTENT1)

      # Delete it.
      artifact_cache.delete(key)
      self.assertFalse(artifact_cache.has(key))
Example 19
  def setUp(self):
    with temporary_file() as ini:
      ini.write(
'''
[DEFAULT]
answer: 42
scale: 1.2
path: /a/b/%(answer)s
embed: %(path)s::foo
disclaimer:
  Let it be known
  that.

[a]
fast: True
list: [1, 2, 3, %(answer)s]

[b]
preempt: False
dict: {
    'a': 1,
    'b': %(answer)s,
    'c': ['%(answer)s', %(answer)s]
  }
''')
      ini.close()
      self.config = Config.load(configpath=ini.name)
Example 20
  def use_cached_files(self, cache_key):
    # This implementation fetches the appropriate tarball and extracts it.
    remote_path = self._remote_path_for_key(cache_key)
    try:
      # Send an HTTP request for the tarball.
      response = self._request('GET', remote_path)
      if response is None:
        return None

      with temporary_file() as outfile:
        total_bytes = 0
        # Read the data in a loop.
        for chunk in response.iter_content(self.READ_SIZE_BYTES):
          outfile.write(chunk)
          total_bytes += len(chunk)

        outfile.close()
        self.log.debug('Read %d bytes from artifact cache at %s' %
                       (total_bytes, self._url_string(remote_path)))

        # Extract the tarfile.
        artifact = TarballArtifact(self.artifact_root, outfile.name, self.compress)
        artifact.extract()
        return artifact
    except Exception as e:
      self.log.warn('Error while reading from remote artifact cache: %s' % e)
      return None
Example 21
  def config_file(self,
                  build_type='debug',
                  keystore_location='%(pants_configdir)s/android/debug.keystore',
                  keystore_alias='androiddebugkey',
                  keystore_password='******',
                  key_password='******'):
    with temporary_file() as fp:
      fp.write(textwrap.dedent(
        """

        [test-release]

        build_type: release
        keystore_location: /some/path
        keystore_alias: test
        keystore_password: password
        key_password: password

        [default-debug]

        build_type: {0}
        keystore_location: {1}
        keystore_alias: {2}
        keystore_password: {3}
        key_password: {4}
        """).format(build_type, keystore_location, keystore_alias, keystore_password, key_password))
      path = fp.name
      fp.close()
      yield path
Example 22
 def test_exclude(self):
     cmd = ["compile", "contrib/findbugs/tests/java/org/pantsbuild/contrib/findbugs:all"]
     with temporary_file(root_dir=get_buildroot()) as exclude_file:
         exclude_file.write(
             dedent(
                 """\
     <?xml version="1.0" encoding="UTF-8"?>
     <FindBugsFilter>
       <Match>
         <Bug pattern="NP_ALWAYS_NULL" />
         <Class name="org.pantsbuild.contrib.findbugs.AllWarnings" />
         <Method name="main" />
       </Match>
     </FindBugsFilter>
   """
             )
         )
         exclude_file.close()
         pants_ini_config = {"compile.findbugs": {"exclude_filter_file": exclude_file.name}}
         pants_run = self.run_pants(cmd, config=pants_ini_config)
     self.assert_success(pants_run)
     self.assertIn("Bug[high]:", pants_run.stdout_data)
     self.assertNotIn("Bug[normal]:", pants_run.stdout_data)
     self.assertIn("Bug[low]:", pants_run.stdout_data)
     self.assertNotIn("Errors:", pants_run.stdout_data)
     self.assertIn("Bugs: 2 (High: 1, Normal: 0, Low: 1)", pants_run.stdout_data)
Example 23
  def test_pantsd_invalidation_file_tracking(self):
    test_dir = 'testprojects/src/python/print_env'
    config = {'GLOBAL': {'pantsd_invalidation_globs': '["%s/*"]' % test_dir}}
    with self.pantsd_successful_run_context(extra_config=config) as (
      pantsd_run, checker, workdir, _
    ):
      pantsd_run(['help'])
      checker.assert_started()

      # Let any fs events quiesce.
      time.sleep(5)

      def full_pantsd_log():
        return '\n'.join(read_pantsd_log(workdir))

      # Check the logs.
      assertRegex(
        self,
        full_pantsd_log(),
        r'watching invalidating files:.*{}'.format(test_dir)
      )

      checker.assert_running()

      # Create a new file in test_dir
      with temporary_file(suffix='.py', binary_mode=False, root_dir=test_dir) as temp_f:
        temp_f.write("import that\n")
        temp_f.close()

        time.sleep(10)
        checker.assert_stopped()

      self.assertIn('saw file events covered by invalidation globs', full_pantsd_log())
Example 24
def safe_classpath(classpath, synthetic_jar_dir):
  """Bundles classpath into one synthetic jar that includes original classpath in its manifest.

  This is to ensure classpath length never exceeds platform ARG_MAX.

  :param list classpath: Classpath to be bundled.
  :param string synthetic_jar_dir: Directory in which to store the synthetic jar. If `None`,
    a temp directory will be provided and cleaned up upon process exit; otherwise the
    synthetic jar will remain in the supplied directory, for debugging purposes only.

  :returns: A classpath (singleton list with just the synthetic jar).
  :rtype: list of strings
  """
  if synthetic_jar_dir:
    safe_mkdir(synthetic_jar_dir)
  else:
    synthetic_jar_dir = safe_mkdtemp()

  bundled_classpath = relativize_classpath(classpath, synthetic_jar_dir)

  manifest = Manifest()
  manifest.addentry(Manifest.CLASS_PATH, ' '.join(bundled_classpath))

  with temporary_file(root_dir=synthetic_jar_dir, cleanup=False, suffix='.jar') as jar_file:
    with open_zip(jar_file, mode='w', compression=ZIP_STORED) as jar:
      jar.writestr(Manifest.PATH, manifest.contents())
    return [jar_file.name]
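A hedged usage sketch for safe_classpath; the jar paths and main class are invented for illustration:

import os

classpath = ['/repo/lib/a.jar', '/repo/lib/b.jar']  # potentially thousands of entries
synthetic = safe_classpath(classpath, synthetic_jar_dir=None)
# `synthetic` is a singleton list: one jar whose manifest Class-Path points at
# the relativized original entries, keeping the java command line short.
cmd = ['java', '-cp', os.pathsep.join(synthetic), 'com.example.Main']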
Example 25
def safe_classpath(classpath, synthetic_jar_dir, custom_name=None):
  """Bundles classpath into one synthetic jar that includes original classpath in its manifest.

  This is to ensure classpath length never exceeds platform ARG_MAX.

  :param list classpath: Classpath to be bundled.
  :param string synthetic_jar_dir: Directory in which to store the synthetic jar. If `None`,
    a temp directory will be provided and cleaned up upon process exit; otherwise the
    synthetic jar will remain in the supplied directory, for debugging purposes only.
  :param custom_name: filename of the synthetic jar to be created.

  :returns: A classpath (singleton list with just the synthetic jar).
  :rtype: list of strings
  """
  if synthetic_jar_dir:
    safe_mkdir(synthetic_jar_dir)
  else:
    synthetic_jar_dir = safe_mkdtemp()

  # Quote the paths so that those containing reserved characters can be safely passed to the JVM classloader.
  bundled_classpath = map(urllib.quote, relativize_classpath(classpath, synthetic_jar_dir))

  manifest = Manifest()
  manifest.addentry(Manifest.CLASS_PATH, ' '.join(bundled_classpath))

  with temporary_file(root_dir=synthetic_jar_dir, cleanup=False, suffix='.jar') as jar_file:
    with open_zip(jar_file, mode='w', compression=ZIP_STORED) as jar:
      jar.writestr(Manifest.PATH, manifest.contents())

    if custom_name:
      custom_path = os.path.join(synthetic_jar_dir, custom_name)
      safe_concurrent_rename(jar_file.name, custom_path)
      return [custom_path]
    else:
      return [jar_file.name]
Example 26
 def test_download_path(self):
   with temporary_file() as fd:
     fd.close()
     downloaded, path = self.expect_download(path_or_fd=fd.name)
     self.assertEqual(path, fd.name)
     with open(path) as fp:
       self.assertEqual(downloaded, fp.read())
Example 27
  def assert_files(self, lang, rel_path, contents, service_writer, expected_files):
    assert_list(expected_files)

    with temporary_file() as fp:
      fp.write(contents)
      fp.close()
      self.assertEqual(set(expected_files), calculate_genfiles(fp.name, rel_path, service_writer)[lang])
Example 28
def bundled_classpath(classpath):
  """Bundles classpath into one synthetic jar that includes original classpath in its manifest.

  See https://docs.oracle.com/javase/7/docs/technotes/guides/extensions/spec.html#bundled

  :param list classpath: Classpath to be bundled.

  :returns: A classpath (singleton list with just the synthetic jar).
  :rtype: list of strings
  """
  def prepare_url(url):
    url_in_bundle = os.path.realpath(url)
    # Append '/' for directories; paths not ending with '/' are assumed to be jars.
    if os.path.isdir(url):
      url_in_bundle += '/'
    return url_in_bundle

  bundled_classpath = [prepare_url(url) for url in classpath]

  manifest = Manifest()
  manifest.addentry(Manifest.CLASS_PATH, ' '.join(bundled_classpath))

  with temporary_file(cleanup=False, suffix='.jar') as jar_file:
    with open_zip(jar_file, mode='w', compression=ZIP_STORED) as jar:
      jar.writestr(Manifest.PATH, manifest.contents())
    yield [jar_file.name]
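bundled_classpath yields rather than returns, so it is presumably meant to be used as a context manager (an @contextmanager decorator may have been dropped from this snippet). A hedged usage sketch under that assumption:

from contextlib import contextmanager

# Assumption: the original function is decorated with @contextmanager.
bundled_classpath_cm = contextmanager(bundled_classpath)

with bundled_classpath_cm(['/repo/lib/a.jar', '/repo/lib/b.jar']) as cp:
  # cp is a singleton list containing the synthetic jar's path; the jar
  # persists afterwards because the temp file was created with cleanup=False.
  print(cp[0])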
Example 29
 def xml_file(self,
              manifest_element='manifest',
              package_attribute='package',
              package_value='org.pantsbuild.example.hello',
              uses_sdk_element='uses-sdk',
              android_attribute='android:targetSdkVersion',
              activity_element='activity',
              android_name_attribute='android:name',
              application_name_value='org.pantsbuild.example.hello.HelloWorld'):
   """Represent an .xml file (Here an AndroidManifest.xml is used)."""
   with temporary_file(binary_mode=False) as fp:
     fp.write(textwrap.dedent(
       """<?xml version="1.0" encoding="utf-8"?>
       <{manifest} xmlns:android="http://schemas.android.com/apk/res/android"
                   xmlns:unrelated="http://schemas.android.com/apk/res/android"
           {package}="{package_name}" >
           <{uses_sdk}
               {android}="19" />
           <application >
               <{activity}
                   {android_name}="{application_name}" >
               </{activity}>
           </application>
       </{manifest}>""".format(manifest=manifest_element,
                               package=package_attribute,
                               package_name=package_value,
                               uses_sdk=uses_sdk_element,
                               android=android_attribute,
                               activity=activity_element,
                               android_name=android_name_attribute,
                               application_name=application_name_value)))
     fp.close()
     path = fp.name
     yield path
Example 30
  def binary_shader(self, output_jar, main, jar, custom_rules=None, jvm_options=None):
    """Yields an `Executor.Runner` that will perform shading of the binary `jar` when `run()`.

    The default rules will ensure the `main` class name is un-changed along with a minimal set of
    support classes but that everything else will be shaded.

    Any `custom_rules` are given highest precedence and so they can interfere with this automatic
    binary shading.  In general its safe to add exclusion rules to open up classes that need to be
    shared between the binary and the code it runs over.  An example would be excluding the
    `org.junit.Test` annotation class from shading since both a tool running junit needs to be able
    to scan for this annotation applied to the user code it tests.

    :param unicode output_jar: The path to dump the shaded jar to; will be over-written if it
                               exists.
    :param unicode main: The main class in the `jar` to preserve as the entry point.
    :param unicode jar: The path to the jar file to shade.
    :param list custom_rules: An optional list of custom `Shader.Rule`s.
    :param list jvm_options: an optional sequence of options for the underlying jvm
    :returns: An `Executor.Runner` that can be `run()` to shade the given `jar`.
    """
    with temporary_file() as fp:
      for rule in self.assemble_binary_rules(main, jar, custom_rules=custom_rules):
        fp.write(rule.render())
      fp.close()

      yield self._executor.runner(classpath=[self._jarjar],
                                  main='org.pantsbuild.jarjar.Main',
                                  jvm_options=jvm_options,
                                  args=['process', fp.name, jar, output_jar])
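binary_shader likewise yields, reading as a context manager that hands back a ready-to-run jarjar invocation. A hedged usage sketch; the shader instance, the paths, and the main class are assumptions:

# Hedged sketch; `shader`, the paths, and the main class are invented.
with shader.binary_shader(output_jar='dist/tool-shaded.jar',
                          main='com.example.tool.Main',
                          jar='dist/tool.jar') as runner:
  result = runner.run()  # per the docstring, shading happens on run()
  if result != 0:
    raise TaskError('jarjar shading exited non-zero: {}'.format(result))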
Example 31
def test_get_pants_cachedir() -> None:
    with environment_as(XDG_CACHE_HOME=""):
        assert os.path.expanduser("~/.cache/pants") == get_pants_cachedir()
    with temporary_file() as temp, environment_as(XDG_CACHE_HOME=temp.name):
        assert os.path.join(temp.name, "pants") == get_pants_cachedir()
Example 32
 def _write_to_tempfile(self, content):
     """Writes content to a temp file and returns the path to that file."""
     with temporary_file(cleanup=False, binary_mode=False) as output:
         output.write(content)
         return output.name
Example 33
  def instrument(self, output_dir):
    for datafile in self._iter_datafiles(output_dir):
      os.unlink(datafile)

    self._canonical_datafile = os.path.join(output_dir, '{}.canonical'.format(self._DATAFILE_NAME))
    # It's conceivable we'll be executing a test that has no source file dependencies; i.e., we'll
    # never generate a canonical coverage datafile below. Create an empty one here to allow the
    # test run to proceed normally.
    touch(self._canonical_datafile)

    # Setup an instrumentation classpath based on the existing runtime classpath.
    runtime_classpath = self._context.products.get_data('runtime_classpath')
    instrumentation_classpath = self._context.products.safe_create_data('instrument_classpath',
                                                                        runtime_classpath.copy)
    self.initialize_instrument_classpath(output_dir,
                                         self._settings,
                                         self._targets,
                                         instrumentation_classpath)

    cobertura_cp = self._settings.tool_classpath('cobertura-instrument')
    files_to_instrument = []
    for target in self._targets:
      if Cobertura.is_coverage_target(target):
        paths = instrumentation_classpath.get_for_target(target)
        for (name, path) in paths:
          files_to_instrument.append(path)

    if len(files_to_instrument) > 0:
      unique_files = list(set(files_to_instrument))
      relativize_paths(unique_files, self._settings.workdir)

      args = [
        '--basedir',
        self._settings.workdir,
        '--datafile',
        self._canonical_datafile,
      ]

      if self._include_user_classpath:
        closure = BuildGraph.closure(self._targets, bfs=True, include_scopes=Scopes.JVM_TEST_SCOPES,
          respect_intransitive=True)

        aux_classpath = safe_classpath(
          ClasspathUtil.classpath(closure, runtime_classpath),
          synthetic_jar_dir=None)
        args.append('--auxClasspath')
        args.extend(aux_classpath)

      # apply class incl/excl filters
      if len(self._include_classes) > 0:
        for pattern in self._include_classes:
          args += ["--includeClasses", pattern]
      else:
        args += ["--includeClasses", '.*']  # default to instrumenting all classes
      for pattern in self._exclude_classes:
        args += ["--excludeClasses", pattern]

      with temporary_file() as tmp_file:
        tmp_file.write("\n".join(unique_files))
        tmp_file.flush()

        args += ["--listOfFilesToInstrument", tmp_file.name]

        main = 'net.sourceforge.cobertura.instrument.InstrumentMain'
        self._settings.log.debug(
          "executing cobertura instrumentation with the following args: {}".format(args))
        result = self._execute_java(classpath=cobertura_cp,
                                    main=main,
                                    jvm_options=self._settings.coverage_jvm_options,
                                    args=args,
                                    workunit_factory=self._context.new_workunit,
                                    workunit_name='cobertura-instrument')
        if result != 0:
          raise TaskError("java {0} ... exited non-zero ({1})"
                          " 'failed to instrument'".format(main, result))
Example 34
 def assert_files(self, task, rel_path, contents, expected_files):
   assert_list(expected_files)
   with temporary_file() as fp:
     fp.write(contents)
     fp.close()
     self.assertEqual(set(expected_files), set(task.calculate_genfiles(fp.name, rel_path)))
Example 35
    def test_permissions(self) -> None:
        with temporary_file(permissions=0o700) as f:
            self.assertEqual(0o700, os.stat(f.name)[0] & 0o777)

        with temporary_dir(permissions=0o644) as path:
            self.assertEqual(0o644, os.stat(path)[0] & 0o777)
Example 36
    def _construct_cmd_args(jars, common_args, global_excludes, pinned_coords,
                            coursier_workdir, json_output_path):

        # Make a copy so there are no side effects for others using `common_args`.
        cmd_args = list(common_args)

        cmd_args.extend(['--json-output-file', json_output_path])

        # Dealing with intransitivity and forced versions.
        for j in jars:
            if not j.rev:
                raise TaskError(
                    'Undefined revs for jars unsupported by Coursier. "{}"'.
                    format(repr(j.coordinate).replace('M2Coordinate', 'jar')))

            module = j.coordinate.simple_coord
            if j.coordinate.classifier:
                module += ',classifier={}'.format(j.coordinate.classifier)

            if j.get_url():
                jar_url = j.get_url()
                module += ',url={}'.format(parse.quote_plus(jar_url))

            if j.intransitive:
                cmd_args.append('--intransitive')

            cmd_args.append(module)

            # Force requires specifying the coord again with -V
            if j.force:
                cmd_args.append('-V')
                cmd_args.append(j.coordinate.simple_coord)

        # Force pinned coordinates
        for m2coord in pinned_coords:
            cmd_args.append('-V')
            cmd_args.append(m2coord.simple_coord)

        # Local exclusions
        local_exclude_args = []
        for jar in jars:
            for ex in jar.excludes:
                # `--` means exclude. See --local-exclude-file in `coursier fetch --help`
                # If ex.name does not exist, that means the whole org needs to be excluded.
                ex_arg = "{}:{}--{}:{}".format(jar.org, jar.name, ex.org,
                                               ex.name or '*')
                local_exclude_args.append(ex_arg)

        if local_exclude_args:
            with temporary_file(coursier_workdir, cleanup=False) as f:
                exclude_file = f.name
                with open(exclude_file, 'w') as ex_f:
                    ex_f.write('\n'.join(local_exclude_args))

                cmd_args.append('--local-exclude-file')
                cmd_args.append(exclude_file)

        for ex in global_excludes:
            cmd_args.append('-E')
            cmd_args.append('{}:{}'.format(ex.org, ex.name or '*'))

        return cmd_args
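A hedged illustration of the argv shape the branches above produce for one forced, intransitive jar plus a pinned coordinate and a global exclude (all values invented):

# The tail of cmd_args, after the copied common_args and the
# --json-output-file pair, for the invented inputs described above.
tail = [
    '--intransitive',
    'org.example:lib:1.0',        # module; classifier=/url= suffixes appended if set
    '-V', 'org.example:lib:1.0',  # force: the coordinate is repeated with -V
    '-V', 'org.pinned:dep:2.0',   # pinned coordinates also use -V
    '-E', 'org.bad:*',            # global exclude; '*' when ex.name is unset
]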
Example 37
 def test_setup_default_config_no_permission(self):
   with temporary_file() as temp:
     os.chmod(temp.name, 0o400)
     with self.assertRaises(TaskError):
       SignApkTask.setup_default_config(temp.name)
Example 38
 def has_ns(stmt):
     with temporary_file() as fp:
         fp.write(stmt)
         fp.flush()
         return SetupPy.declares_namespace_package(fp.name)
Example 39
 def _do_assert_fromfile(dest, expected, contents):
   with temporary_file() as fp:
     fp.write(contents)
     fp.close()
     options = parse_func(dest, fp.name)
     self.assertEqual(expected, options.for_scope('fromfile')[dest])
Example 40
 def _zip_sources(target, location, suffix='.jar'):
   with temporary_file(root_dir=location, cleanup=False, suffix=suffix) as f:
     with zipfile.ZipFile(f, 'a') as zip_file:
       for src_from_source_root, src_from_build_root in zip(target.sources_relative_to_source_root(), target.sources_relative_to_buildroot()):
         zip_file.write(os.path.join(get_buildroot(), src_from_build_root), src_from_source_root)
   return f
Example 41
 def has_ns(stmt):
     with temporary_file(binary_mode=False) as fp:
         fp.write(stmt)
         fp.flush()
         return declares_namespace_package(fp.name)
Example 42
    def instrument(self):
        # Setup an instrumentation classpath based on the existing runtime classpath.
        runtime_classpath = self._context.products.get_data(
            'runtime_classpath')
        instrumentation_classpath = self._context.products.safe_create_data(
            'instrument_classpath', runtime_classpath.copy)
        self.initialize_instrument_classpath(self._targets,
                                             instrumentation_classpath)

        cobertura_cp = self._settings.tool_classpath('cobertura-instrument')
        safe_delete(self._coverage_datafile)
        files_to_instrument = []
        for target in self._targets:
            if self.is_coverage_target(target):
                paths = instrumentation_classpath.get_for_target(target)
                for (name, path) in paths:
                    files_to_instrument.append(path)

        if len(files_to_instrument) > 0:
            self._nothing_to_instrument = False

            unique_files = list(set(files_to_instrument))
            relativize_paths(unique_files, self._settings.workdir)

            args = [
                '--basedir',
                self._settings.workdir,
                '--datafile',
                self._coverage_datafile,
            ]
            # apply class incl/excl filters
            if len(self._include_classes) > 0:
                for pattern in self._include_classes:
                    args += ["--includeClasses", pattern]
            else:
                args += ["--includeClasses",
                         '.*']  # default to instrumenting all classes
            for pattern in self._exclude_classes:
                args += ["--excludeClasses", pattern]

            with temporary_file() as tmp_file:
                tmp_file.write("\n".join(unique_files))
                tmp_file.flush()

                args += ["--listOfFilesToInstrument", tmp_file.name]

                main = 'net.sourceforge.cobertura.instrument.InstrumentMain'
                self._context.log.debug(
                    "executing cobertura instrumentation with the following args: {}"
                    .format(args))
                result = self._execute_java(
                    classpath=cobertura_cp,
                    main=main,
                    jvm_options=self._coverage_jvm_options,
                    args=args,
                    workunit_factory=self._context.new_workunit,
                    workunit_name='cobertura-instrument')
                if result != 0:
                    raise TaskError("java {0} ... exited non-zero ({1})"
                                    " 'failed to instrument'".format(
                                        main, result))
Example 43
 def assert_files(self, lang, rel_path, contents, *expected_files):
   with temporary_file() as fp:
     fp.write(contents)
     fp.close()
     self.assertEqual(set(expected_files), calculate_genfiles(fp.name, rel_path)[lang])
Example 44
 def temporary_rules_file(self, rules):
   with temporary_file(binary_mode=False) as fp:
     for rule in rules:
       fp.write(rule.render())
     fp.close()
     yield fp.name
Example 45
 def test_set_cachedir(self) -> None:
     with temporary_file() as temp:
         with environment_as(XDG_CACHE_HOME=temp.name):
             self.assertEqual(os.path.join(temp.name, "pants"),
                              get_pants_cachedir())
Example 46
def atomic_copy(src, dst):
    """Copy the file src to dst, overwriting dst atomically."""
    with temporary_file(root_dir=os.path.dirname(dst)) as tmp_dst:
        shutil.copyfile(src, tmp_dst.name)
        os.rename(tmp_dst.name, dst)
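The temp file is created in dst's own directory so the final os.rename happens within a single filesystem, where POSIX guarantees the rename is atomic; across filesystems it would raise EXDEV instead. A minimal usage sketch with invented paths:

atomic_copy('build/artifact.jar', 'dist/artifact.jar')
# Concurrent readers of dist/artifact.jar see either the old or the new
# contents in full, never a partially written file.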
Example 47
 def _generate_to_tempfile(self, generator):
     """Applies the specified generator to a temp file and returns the path to that file.
 We generate into a temp file so that we don't lose any manual customizations on error."""
     with temporary_file(cleanup=False, binary_mode=False) as output:
         generator.write(output)
         return output.name
Example 48
 def jarfile(self):
     with temporary_file(root_dir=self.workdir, suffix='.jar') as fd:
         fd.close()
         yield fd.name
Example 49
    def _compile_target(self, target):
        # "Compiles" a target by forming an isolated chroot of its sources and transitive deps and then
        # attempting to import each of the target's sources in the case of a python library or else the
        # entry point in the case of a python binary.
        #
        # For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:
        #
        #   if __name__ == '__main__':
        #     import lib.core
        #     import lib.util
        #
        # For a binary with entry point lib.bin:main the "compiler" main file would look like:
        #
        #   if __name__ == '__main__':
        #     from lib.bin import main
        #
        # In either case the main file is executed within the target chroot to reveal missing BUILD
        # dependencies.

        with self.context.new_workunit(name=target.address.spec):
            modules = []
            if isinstance(target, PythonBinary):
                source = 'entry_point {}'.format(target.entry_point)
                components = target.entry_point.rsplit(':', 1)
                module = components[0]
                if len(components) == 2:
                    function = components[1]
                    data = TemplateData(
                        source=source,
                        import_statement='from {} import {}'.format(
                            module, function))
                else:
                    data = TemplateData(
                        source=source,
                        import_statement='import {}'.format(module))
                modules.append(data)
            else:
                for path in target.sources_relative_to_source_root():
                    if path.endswith('.py'):
                        if os.path.basename(path) == '__init__.py':
                            module_path = os.path.dirname(path)
                        else:
                            module_path, _ = os.path.splitext(path)
                        source = 'file {}'.format(
                            os.path.join(target.target_base, path))
                        module = module_path.replace(os.path.sep, '.')
                        data = TemplateData(
                            source=source,
                            import_statement='import {}'.format(module))
                        modules.append(data)

            if not modules:
                # Nothing to eval, so a trivial compile success.
                return 0

            interpreter = self.select_interpreter_for_targets([target])

            if isinstance(target, PythonBinary):
                pexinfo, platforms = target.pexinfo, target.platforms
            else:
                pexinfo, platforms = None, None

            with self.temporary_pex_builder(interpreter=interpreter,
                                            pex_info=pexinfo) as builder:
                with self.context.new_workunit(name='resolve'):
                    chroot = PythonChroot(context=self.context,
                                          targets=[target],
                                          builder=builder,
                                          platforms=platforms,
                                          interpreter=interpreter)

                    chroot.dump()

                with temporary_file() as imports_file:
                    generator = Generator(pkgutil.get_data(
                        __name__, self._EVAL_TEMPLATE_PATH),
                                          chroot=chroot.path(),
                                          modules=modules)
                    generator.write(imports_file)
                    imports_file.close()

                    builder.set_executable(imports_file.name,
                                           '__pants_python_eval__.py')

                    builder.freeze()
                    pex = PEX(builder.path(), interpreter=interpreter)

                    with self.context.new_workunit(
                            name='eval',
                            labels=[
                                WorkUnit.COMPILER, WorkUnit.RUN, WorkUnit.TOOL
                            ],
                            cmd=' '.join(pex.cmdline())) as workunit:
                        returncode = pex.run(stdout=workunit.output('stdout'),
                                             stderr=workunit.output('stderr'))
                        workunit.set_outcome(WorkUnit.SUCCESS if returncode ==
                                             0 else WorkUnit.FAILURE)
                        if returncode != 0:
                            self.context.log.error('Failed to eval {}'.format(
                                target.address.spec))
                        return returncode
Example 50
 def test_dump_jar_contents_bad_zip(self):
     with temporary_file() as corrupted:
         with self.assertRaisesRegexp(
                 TaskError, r'{}'.format(os.path.realpath(corrupted.name))):
             self.task._dump_jar_contents(corrupted.name)
Example 51
 def assert_files(self, package, contents, *expected_files):
   with temporary_file() as fp:
     fp.write(contents)
     fp.close()
     self.assertEqual(set(expected_files), set(JaxbGen._sources_to_be_generated(package, fp.name)))
Example 52
    def _cov_setup(self, targets, chroot, coverage_modules=None):
        def compute_coverage_modules(target):
            if target.coverage:
                return target.coverage
            else:
                # This makes the assumption that tests/python/<target> will be testing src/python/<target>.
                # Note in particular that this doesn't work for pants' own tests, as those are under
                # the top level package 'pants_tests', rather than just 'pants'.
                # TODO(John Sirois): consider failing fast if there is no explicit coverage scheme; but also
                # consider supporting configuration of a global scheme whether that be parallel
                # dirs/packages or some arbitrary function that can be registered that takes a test target
                # and hands back the source packages or paths under test.
                return set(
                    os.path.dirname(source).replace(os.sep, '.')
                    for source in target.sources_relative_to_source_root())

        if coverage_modules is None:
            coverage_modules = set(
                itertools.chain(
                    *[compute_coverage_modules(t) for t in targets]))

        # Hack in turning off pytest_cov reporting to the console - we want to control this ourselves.
        # Take the approach of registering a plugin that replaces the pycov plugin's
        # `pytest_terminal_summary` callback with a noop.
        with temporary_dir() as plugin_root:
            plugin_root = os.path.realpath(plugin_root)
            with safe_open(os.path.join(plugin_root, 'pants_reporter.py'),
                           'w') as fp:
                fp.write(
                    dedent("""
          def pytest_configure(__multicall__, config):
            # This executes the rest of the pytest_configures ensuring the `pytest_cov` plugin is
            # registered so we can grab it below.
            __multicall__.execute()
            pycov = config.pluginmanager.getplugin('_cov')
            # Squelch console reporting
            pycov.pytest_terminal_summary = lambda *args, **kwargs: None
        """))

            pythonpath = os.environ.get('PYTHONPATH')
            existing_pythonpath = pythonpath.split(
                os.pathsep) if pythonpath else []
            with environment_as(
                    PYTHONPATH=os.pathsep.join(existing_pythonpath +
                                               [plugin_root])):

                def is_python_lib(tgt):
                    return tgt.has_sources('.py') and not isinstance(
                        tgt, PythonTests)

                source_mappings = {}
                for target in targets:
                    libs = (tgt for tgt in target.closure()
                            if is_python_lib(tgt))
                    for lib in libs:
                        source_mappings[lib.target_base] = [chroot]

                cp = self._generate_coverage_config(
                    source_mappings=source_mappings)
                with temporary_file() as fp:
                    cp.write(fp)
                    fp.close()
                    coverage_rc = fp.name
                    args = [
                        '-p', 'pants_reporter', '-p', 'pytest_cov',
                        '--cov-config', coverage_rc
                    ]
                    for module in coverage_modules:
                        args.extend(['--cov', module])
                    yield args, coverage_rc
Example 53
    def _execute_junit_runner(self,
                              list_of_filename_content_tuples,
                              create_some_resources=True,
                              target_name=None):
        # Create the temporary base test directory
        test_rel_path = "tests/java/org/pantsbuild/foo"

        # Create the temporary classes directory under work dir
        test_classes_abs_path = self.create_workdir_dir(test_rel_path)

        test_java_file_abs_paths = []
        # Generate the temporary java test source code.
        for filename, content in list_of_filename_content_tuples:
            test_java_file_rel_path = os.path.join(test_rel_path, filename)
            test_java_file_abs_path = self.create_file(test_java_file_rel_path,
                                                       content)
            test_java_file_abs_paths.append(test_java_file_abs_path)

        distribution = DistributionLocator.cached(jdk=True)
        executor = SubprocessExecutor(distribution=distribution)
        with temporary_file() as f:
            res = executor.execute(
                classpath=[
                    global_subsystem_instance(CoursierSubsystem).select()
                ],
                main="coursier.cli.Coursier",
                args=[
                    "fetch",
                    "junit:junit:4.12",
                    "-r",
                    # This is needed to get around the maven blacklisting RBE.
                    "https://maven-central.storage-download.googleapis.com/repos/central/data",
                ],
                stdout=f,
            )
            self.assertEqual(0, res, "Coursier resolve failed.")
            f.seek(0)
            classpath = ":".join(f.read().decode().split())

        # Now directly invoke javac to compile the test java code into classfiles that we can later
        # inject into a product mapping for JUnitRun to execute against.
        javac = distribution.binary("javac")
        subprocess.check_call(
            [javac, "-d", test_classes_abs_path, "-cp", classpath] +
            test_java_file_abs_paths)

        # If a target_name is specified create a target with it, otherwise create a junit_tests target.
        if target_name:
            target = self.target(target_name)
        else:
            target = self.create_library(
                path=test_rel_path,
                target_type="junit_tests",
                name="foo_test",
                sources=["FooTest.java"],
            )

        target_roots = []
        if create_some_resources:
            # Create a synthetic resource target.
            target_roots.append(self.make_target("some_resources", Resources))
        target_roots.append(target)

        # Set the context with the two targets, one junit_tests target and
        # one synthetic resources target.
        # The synthetic resources target guards against regressing on a bug like
        # https://github.com/pantsbuild/pants/issues/508. Note that in that bug,
        # the resources target must be the first one in the list.
        context = self.context(target_roots=target_roots)

        # Before we run the task, we need to inject the "runtime_classpath" with
        # the compiled test java classes so that JUnitRun knows which test
        # classes to execute. In a normal run, this "runtime_classpath" is
        # populated by the java compilation step.
        self.populate_runtime_classpath(context=context,
                                        classpath=[test_classes_abs_path])

        # Finally execute the task.
        self.execute(context)
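
Outside the test harness, the same resolve can be sketched with a standalone coursier launcher. This assumes a `coursier` executable on $PATH (the test instead runs the CLI from a fetched jar) and uses hypothetical file paths:

import subprocess

# Fetch junit's jar paths and join them into a javac classpath, mirroring the test.
out = subprocess.check_output(['coursier', 'fetch', 'junit:junit:4.12'])
classpath = ':'.join(out.decode().split())
subprocess.check_call(['javac', '-d', 'build/classes', '-cp', classpath, 'FooTest.java'])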
Example no. 54
    def test_create_bootstrapped_multiple_config_override(self):
        # Check that with multiple config files, the latest values always win.
        # In this case strategy will be overwritten, while fruit stays the same.
        with temporary_file() as fp:
            fp.write(
                dedent("""
                [compile.apt]
                strategy: global

                [fruit]
                apple: red
                """))
            fp.close()

            bootstrapper_single_config = OptionsBootstrapper(
                configpath=fp.name,
                args=['--config-override={}'.format(fp.name)])

            opts_single_config = bootstrapper_single_config.get_full_options(
                known_scope_infos=[
                    ScopeInfo('', ScopeInfo.GLOBAL),
                    ScopeInfo('compile.apt', ScopeInfo.TASK),
                    ScopeInfo('fruit', ScopeInfo.TASK),
                ])
            opts_single_config.register(
                '', '--config-override'
            )  # So we don't choke on it on the cmd line.
            opts_single_config.register('compile.apt', '--strategy')
            opts_single_config.register('fruit', '--apple')

            self.assertEquals(
                'global',
                opts_single_config.for_scope('compile.apt').strategy)
            self.assertEquals('red',
                              opts_single_config.for_scope('fruit').apple)

            with temporary_file() as fp2:
                fp2.write(
                    dedent("""
                    [compile.apt]
                    strategy: isolated
                    """))
                fp2.close()

                bootstrapper_double_config = OptionsBootstrapper(
                    configpath=fp.name,
                    args=[
                        '--config-override={}'.format(fp.name),
                        '--config-override={}'.format(fp2.name)
                    ])

                opts_double_config = bootstrapper_double_config.get_full_options(
                    known_scope_infos=[
                        ScopeInfo('', ScopeInfo.GLOBAL),
                        ScopeInfo('compile.apt', ScopeInfo.TASK),
                        ScopeInfo('fruit', ScopeInfo.TASK),
                    ])
                opts_double_config.register(
                    '', '--config-override'
                )  # So we don't choke on it on the cmd line.
                opts_double_config.register('compile.apt', '--strategy')
                opts_double_config.register('fruit', '--apple')

                self.assertEquals(
                    'isolated',
                    opts_double_config.for_scope('compile.apt').strategy)
                self.assertEquals('red',
                                  opts_double_config.for_scope('fruit').apple)
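
The precedence this test asserts can be reproduced with the stdlib alone; a small sketch using configparser, where a later read wins on conflicting keys:

from configparser import ConfigParser

cp = ConfigParser()
cp.read_string('[compile.apt]\nstrategy: global\n\n[fruit]\napple: red\n')  # base config
cp.read_string('[compile.apt]\nstrategy: isolated\n')  # later override wins
assert cp['compile.apt']['strategy'] == 'isolated'
assert cp['fruit']['apple'] == 'red'  # untouched by the override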
Example no. 55
 def _tmpfile(self, cache_key, use):
     """Allocate tempfile on same device as cache with a suffix chosen to prevent collisions"""
     with temporary_file(suffix=cache_key.id + use,
                         root_dir=self._cache_root,
                         permissions=self._permissions) as tmpfile:
         yield tmpfile
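
The generator above only behaves as a with-statement target once decorated with contextlib.contextmanager, which the snippet elides. A self-contained rendering (the function name and parameters are mine, not pants'):

from contextlib import contextmanager
from pants.util.contextutil import temporary_file

@contextmanager
def cache_tmpfile(cache_root, cache_key_id, use):
    # The temp file lives under the cache root (same device), so a later
    # os.rename into the cache cannot cross filesystems; the suffix encodes
    # key id + use to avoid collisions between concurrent writers.
    with temporary_file(suffix=cache_key_id + use, root_dir=cache_root) as tmp:
        yield tmp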
Example no. 56
 def test_smoke(self):
     with temporary_file() as fp:
         fp.write(ragel_file_contents)
         fp.flush()
         self.assertEquals(calculate_genfile(fp.name),
                           'com/example/atoi/Parser.java')
Example no. 57
def atomic_copy(src: str, dst: str) -> None:
    """Copy the file src to dst, overwriting dst atomically."""
    with temporary_file(root_dir=os.path.dirname(dst)) as tmp_dst:
        shutil.copyfile(src, tmp_dst.name)
        os.chmod(tmp_dst.name, os.stat(src).st_mode)
        os.rename(tmp_dst.name, dst)
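
A hedged usage note: because the temp copy is created in dst's own directory, the final os.rename happens within one filesystem and is atomic on POSIX, so readers never observe a half-written dst. For example (paths illustrative):

atomic_copy('settings.new.ini', 'settings.ini')
# Any concurrent reader of settings.ini sees either the old or the new
# content in full, never a partial copy.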
Example no. 58
    def test_permissions(self):
        with temporary_file(permissions=0o700) as f:
            self.assertEquals(0o700, os.stat(f.name)[0] & 0o777)

        with temporary_dir(permissions=0o644) as path:
            self.assertEquals(0o644, os.stat(path)[0] & 0o777)
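
The `& 0o777` mask is close to what the stdlib's stat.S_IMODE extracts (S_IMODE also keeps the setuid/setgid/sticky bits, which are clear on a fresh temp file, so the two checks agree here). An equivalent assertion:

import os
import stat
from pants.util.contextutil import temporary_file

with temporary_file(permissions=0o700) as f:
    assert stat.S_IMODE(os.stat(f.name).st_mode) == 0o700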
Example no. 59
 def jarfile(self):
   with temporary_file() as fd:
     fd.close()
     yield fd.name
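
The close-before-yield here hands callers a usable path rather than an open handle, so an external tool can write the file itself. A self-contained sketch (the contextmanager decorator is elided in the snippet; the `jar` tool on $PATH and the `build/classes` directory are assumptions of mine):

import subprocess
from contextlib import contextmanager
from pants.util.contextutil import temporary_file

@contextmanager
def jarfile_path():
    with temporary_file() as fd:
        fd.close()  # release the handle so `jar` can rewrite the path freely
        yield fd.name

with jarfile_path() as jar_path:
    subprocess.check_call(['jar', 'cf', jar_path, '-C', 'build/classes', '.'])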
Example no. 60
    def _get_mypy_pex(self, py3_interpreter: PythonInterpreter,
                      *extra_pexes: PEX) -> PEX:
        mypy_version = self._mypy_subsystem.options.version
        extras_hash = hash_utils.hash_all(
            hash_utils.hash_dir(Path(extra_pex.path()))
            for extra_pex in extra_pexes)

        path = Path(self.workdir, str(py3_interpreter.identity),
                    f"{mypy_version}-{extras_hash}")
        pex_dir = str(path)
        if not path.is_dir():
            mypy_requirement_pex = self.resolve_requirement_strings(
                py3_interpreter, [mypy_version])
            pex_info = PexInfo.default()
            pex_info.entry_point = "pants_mypy_launcher"
            with self.merged_pex(
                    path=pex_dir,
                    pex_info=pex_info,
                    interpreter=py3_interpreter,
                    pexes=[mypy_requirement_pex, *extra_pexes],
            ) as builder:
                with temporary_file(binary_mode=False) as exe_fp:
                    # MyPy searches for types for a package in packages containing a `py.typed`
                    # marker file or else in a sibling `<package>-stubs` package as per PEP-0561.
                    # Going further than that PEP, MyPy restricts its search to `site-packages`.
                    # Since PEX deliberately isolates itself from `site-packages` as part of its
                    # raison d'etre, we monkey-patch `site.getsitepackages` to look inside the
                    # scrubbed PEX sys.path before handing off to `mypy`.
                    #
                    # As a complication, MyPy does its own validation to ensure packages aren't
                    # both available in site-packages and on the PYTHONPATH. As such, we elide all
                    # PYTHONPATH entries from the artificial site-packages we set up, since MyPy will
                    # manually scan PYTHONPATH outside this PEX to find packages.
                    #
                    # See:
                    #   https://mypy.readthedocs.io/en/stable/installed_packages.html#installed-packages
                    #   https://www.python.org/dev/peps/pep-0561/#stub-only-packages
                    exe_fp.write(
                        dedent("""
                            import os
                            import runpy
                            import site
                            import sys

                            PYTHONPATH = frozenset(
                                os.path.realpath(p)
                                for p in os.environ.get('PYTHONPATH', '').split(os.pathsep)
                            )
                            
                            site.getsitepackages = lambda: [
                                p for p in sys.path if os.path.realpath(p) not in PYTHONPATH
                            ]

                            runpy.run_module('mypy', run_name='__main__')
                            """))
                    exe_fp.flush()
                    builder.set_executable(
                        filename=exe_fp.name,
                        env_filename=f"{pex_info.entry_point}.py")
                builder.freeze(bytecode_compile=False)

        return PEX(pex_dir, py3_interpreter)
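
A hedged usage sketch of the result: in the pex API pants uses, PEX.run executes the entry point (here the pants_mypy_launcher shim) in a subprocess and returns its exit code. The requirements_pex argument below is a hypothetical extra PEX, not something defined above:

# Hypothetical call site inside the same task:
mypy_pex = self._get_mypy_pex(py3_interpreter, requirements_pex)
returncode = mypy_pex.run(args=['--version'])  # behaves like running `mypy --version`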