Example #1
  def test_symlink_cachepath(self):
    self.maxDiff = None
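    # symlink_cachepath reads Ivy cache paths from input_path, creates symlinks to
    # them under symlink_dir, writes the symlinked classpath to output_path, and
    # returns a map keyed by both each raw cache path and its realpath.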
    with temporary_dir() as mock_cache_dir:
      with temporary_dir() as symlink_dir:
        with temporary_dir() as classpath_dir:
          input_path = os.path.join(classpath_dir, 'inpath')
          output_path = os.path.join(classpath_dir, 'classpath')
          existing_symlink_map = {}
          foo_path = os.path.join(mock_cache_dir, 'foo.jar')
          with open(foo_path, 'w') as foo:
            foo.write("test jar contents")

          with open(input_path, 'w') as inpath:
            inpath.write(foo_path)
          result_map = IvyUtils.symlink_cachepath(mock_cache_dir, input_path, symlink_dir,
                                                  output_path, existing_symlink_map)
          symlink_foo_path = os.path.join(symlink_dir, 'foo.jar')
          self.assertEqual(
            {
              foo_path : symlink_foo_path,
              os.path.realpath(foo_path) : symlink_foo_path
            },
            result_map)
          with open(output_path, 'r') as outpath:
            self.assertEqual(symlink_foo_path, outpath.readline())
          self.assertTrue(os.path.islink(symlink_foo_path))
          self.assertTrue(os.path.exists(symlink_foo_path))

          # Now add an additional path to the existing map
          bar_path = os.path.join(mock_cache_dir, 'bar.jar')
          with open(bar_path, 'w') as bar:
            bar.write("test jar contents2")
          with open(input_path, 'w') as inpath:
            inpath.write(os.pathsep.join([foo_path, bar_path]))
          existing_symlink_map = result_map
          result_map = IvyUtils.symlink_cachepath(mock_cache_dir, input_path, symlink_dir,
                                                  output_path, existing_symlink_map)
          symlink_bar_path = os.path.join(symlink_dir, 'bar.jar')
          self.assertEqual(
            {
              foo_path : symlink_foo_path,
              os.path.realpath(foo_path) : symlink_foo_path,
              bar_path : symlink_bar_path,
              os.path.realpath(bar_path) : symlink_bar_path,
            },
            result_map)
          with open(output_path, 'r') as outpath:
            self.assertEqual(symlink_foo_path + os.pathsep + symlink_bar_path, outpath.readline())
          self.assertTrue(os.path.islink(symlink_foo_path))
          self.assertTrue(os.path.exists(symlink_foo_path))
          self.assertTrue(os.path.islink(symlink_bar_path))
          self.assertTrue(os.path.exists(symlink_bar_path))

          # Reverse the ordering and make sure order is preserved in the output path
          with open(input_path, 'w') as inpath:
            inpath.write(os.pathsep.join([bar_path, foo_path]))
          IvyUtils.symlink_cachepath(mock_cache_dir, input_path, symlink_dir,
                                     output_path, result_map)
          with open(output_path, 'r') as outpath:
            self.assertEqual(symlink_bar_path + os.pathsep + symlink_foo_path, outpath.readline())
Example #2
  def test_compile_changed(self):
    cmd = ['compile-changed', '--diffspec={}'.format(self.ref_for_greet_change())]

    with temporary_dir(root_dir=self.workdir_root()) as workdir:
      # Nothing exists.
      self.assertFalse(os.path.exists(self.greet_classfile(workdir, 'Greeting.class')))
      self.assertFalse(os.path.exists(self.greet_classfile(workdir, 'GreetingTest.class')))

      run = self.run_pants_with_workdir(cmd, workdir)
      self.assert_success(run)

      # The directly changed target's produced classfile exists.
      self.assertTrue(os.path.exists(self.greet_classfile(workdir, 'Greeting.class')))
      self.assertFalse(os.path.exists(self.greet_classfile(workdir, 'GreetingTest.class')))

    with temporary_dir(root_dir=self.workdir_root()) as workdir:
      # Nothing exists.
      self.assertFalse(os.path.exists(self.greet_classfile(workdir, 'Greeting.class')))
      self.assertFalse(os.path.exists(self.greet_classfile(workdir, 'GreetingTest.class')))

      run = self.run_pants_with_workdir(cmd + ['--include-dependees=direct'], workdir)
      self.assert_success(run)

      # The changed target's and its direct dependees' (e.g. its tests) classfiles exist.
      self.assertTrue(os.path.exists(self.greet_classfile(workdir, 'Greeting.class')))
      self.assertTrue(os.path.exists(self.greet_classfile(workdir, 'GreetingTest.class')))
Example #3
 def test_junit_run_target_cwd_trumps_cwd_option(self):
   with temporary_dir() as target_cwd:
     self.make_target(
       spec='tests/java/org/pantsbuild/foo:foo_test',
       target_type=JUnitTests,
       sources=['FooTest.java'],
       cwd=target_cwd
     )
     content = dedent("""
       package org.pantsbuild.foo;
       import java.io.File;
       import org.junit.Test;
       import static org.junit.Assert.assertFalse;
       import static org.junit.Assert.assertTrue;
       public class FooTest {
         @Test
         public void testFoo() {
           assertTrue(new File("target_cwd_sentinel").exists());
           assertFalse(new File("option_cwd_sentinel").exists());
         }
       }
     """)
     touch(os.path.join(target_cwd, 'target_cwd_sentinel'))
     with temporary_dir() as option_cwd:
       touch(os.path.join(option_cwd, 'option_cwd_sentinel'))
       self.set_options(cwd=option_cwd)
       self._execute_junit_runner([('FooTest.java', content)],
                                  target_name='tests/java/org/pantsbuild/foo:foo_test')
Example #4
  def test_combined_cache(self):
    """Make sure that the combined cache finds what it should and that it backfills."""
    httpd = None
    httpd_thread = None
    try:
      with temporary_dir() as http_root:
        with temporary_dir() as cache_root:
          with pushd(http_root):  # SimpleRESTHandler serves from the cwd.
            httpd = SocketServer.TCPServer(('localhost', 0), SimpleRESTHandler)
            port = httpd.server_address[1]
            httpd_thread = Thread(target=httpd.serve_forever)
            httpd_thread.start()
            with temporary_dir() as artifact_root:
              local = LocalArtifactCache(None, artifact_root, cache_root)
              remote = RESTfulArtifactCache(MockLogger(), artifact_root, 'http://localhost:%d' % port)
              combined = CombinedArtifactCache([local, remote])

              key = CacheKey('muppet_key', 'fake_hash', 42)

              with temporary_file(artifact_root) as f:
                # Write the file.
                f.write(TEST_CONTENT1)
                path = f.name
                f.close()

                # No cache has key.
                self.assertFalse(local.has(key))
                self.assertFalse(remote.has(key))
                self.assertFalse(combined.has(key))

                # No cache returns key.
                self.assertFalse(bool(local.use_cached_files(key)))
                self.assertFalse(bool(remote.use_cached_files(key)))
                self.assertFalse(bool(combined.use_cached_files(key)))

                # Attempting to use a key that no cache had should not change anything.
                self.assertFalse(local.has(key))
                self.assertFalse(remote.has(key))
                self.assertFalse(combined.has(key))

                # Add to only remote cache.
                remote.insert(key, [path])

                self.assertFalse(local.has(key))
                self.assertTrue(remote.has(key))
                self.assertTrue(combined.has(key))

                # Successfully using via remote should NOT change local.
                self.assertTrue(bool(remote.use_cached_files(key)))
                self.assertFalse(local.has(key))

                # Successfully using via combined SHOULD backfill local.
                self.assertTrue(bool(combined.use_cached_files(key)))
                self.assertTrue(local.has(key))
                self.assertTrue(bool(local.use_cached_files(key)))
    finally:
      if httpd:
        httpd.shutdown()
      if httpd_thread:
        httpd_thread.join()
Example #5
  def unpack_target(self, unpacked_whls, unpack_dir):
    interpreter = self._compatible_interpreter(unpacked_whls)

    with temporary_dir() as resolve_dir,\
         temporary_dir() as extract_dir:
      try:
        matched_dist = self._get_matching_wheel(resolve_dir, interpreter,
                                                unpacked_whls.all_imported_requirements,
                                                unpacked_whls.module_name)
        ZIP.extract(matched_dist.location, extract_dir)
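        # Per the wheel spec, a wheel may place files under a
        # '{name}-{version}.data/{subdir}' tree; when within_data_subdir is set,
        # the module's files are unpacked from that subtree instead of the wheel root.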
        if unpacked_whls.within_data_subdir:
          data_dir_prefix = '{name}-{version}.data/{subdir}'.format(
            name=matched_dist.project_name,
            version=matched_dist.version,
            subdir=unpacked_whls.within_data_subdir,
          )
          dist_data_dir = os.path.join(extract_dir, data_dir_prefix)
        else:
          dist_data_dir = extract_dir
        unpack_filter = self.get_unpack_filter(unpacked_whls)
        # Copy over the module's data files into `unpack_dir`.
        mergetree(dist_data_dir, unpack_dir, file_filter=unpack_filter)
      except Exception as e:
        raise self.WheelUnpackingError(
          "Error extracting wheel for target {}: {}"
          .format(unpacked_whls, str(e)),
          e)
Example #6
  def test_analysis_portability(self):
    # Tests that analysis can be relocated between workdirs and still result in incremental
    # compile.
    with temporary_dir() as cache_dir, temporary_dir(root_dir=get_buildroot()) as src_dir, \
      temporary_dir(root_dir=get_buildroot(), suffix='.pants.d') as workdir:
      config = {
        'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
      }

      dep_src_file = os.path.join(src_dir, 'org', 'pantsbuild', 'dep', 'A.scala')
      dep_build_file = os.path.join(src_dir, 'org', 'pantsbuild', 'dep', 'BUILD')
      con_src_file = os.path.join(src_dir, 'org', 'pantsbuild', 'consumer', 'B.scala')
      con_build_file = os.path.join(src_dir, 'org', 'pantsbuild', 'consumer', 'BUILD')

      dep_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild', 'dep')
      con_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild', 'consumer')

      dep_src = "package org.pantsbuild.dep; class A {}"

      self.create_file(dep_src_file, dep_src)
      self.create_file(dep_build_file, "scala_library()")
      self.create_file(con_src_file, dedent(
        """package org.pantsbuild.consumer
           import org.pantsbuild.dep.A
           class B { def mkA: A = new A() }"""))
      self.create_file(con_build_file, "scala_library(dependencies=['{}'])".format(dep_spec))

      rel_workdir = fast_relpath(workdir, get_buildroot())
      rel_src_dir = fast_relpath(src_dir, get_buildroot())
      with self.mock_buildroot(dirs_to_copy=[rel_src_dir, rel_workdir]) as buildroot, \
        buildroot.pushd():
        # 1) Compile in one buildroot.
        self.run_compile(con_spec, config, os.path.join(buildroot.new_buildroot, rel_workdir))

      with self.mock_buildroot(dirs_to_copy=[rel_src_dir, rel_workdir]) as buildroot, \
        buildroot.pushd():
        # 2) Compile in another buildroot, and check that we hit the cache.
        new_workdir = os.path.join(buildroot.new_buildroot, rel_workdir)
        run_two = self.run_compile(con_spec, config, new_workdir)
        self.assertTrue(
            re.search(
              "\[zinc\][^[]*\[cache\][^[]*Using cached artifacts for 2 targets.",
              run_two.stdout_data),
            run_two.stdout_data)

        # 3) Edit the dependency in a way that should trigger an incremental
        #    compile of the consumer.
        mocked_dep_src_file = os.path.join(
          buildroot.new_buildroot,
          fast_relpath(dep_src_file, get_buildroot()))
        self.create_file(mocked_dep_src_file, dep_src + "; /* this is a comment */")

        # 4) Compile and confirm that the analysis fetched from the cache in
        #    step 2 causes incrementalism: ie, zinc does not report compiling any files.
        run_three = self.run_compile(con_spec, config, new_workdir)
        self.assertTrue(
            re.search(
              r"/org/pantsbuild/consumer:consumer\)[^[]*\[compile\][^[]*\[zinc\]\W*\[info\] Compile success",
              run_three.stdout_data),
            run_three.stdout_data)
Example #7
  def test_scalastyle_cached(self):
    with temporary_dir(root_dir=self.workdir_root()) as cache:
      with temporary_dir(root_dir=self.workdir_root()) as workdir:
        scalastyle_args = [
          'clean-all',
          'compile.scalastyle',
          "--cache-write-to=['{}']".format(cache),
          "--cache-read-from=['{}']".format(cache),
          'examples/tests/scala/org/pantsbuild/example/hello/welcome',
          '-ldebug'
        ]

        pants_run = self.run_pants_with_workdir(scalastyle_args, workdir)
        self.assert_success(pants_run)
        self.assertIn('abc_Scalastyle_compile_scalastyle will write to local artifact cache',
            pants_run.stdout_data)

        pants_run = self.run_pants_with_workdir(scalastyle_args, workdir)
        self.assert_success(pants_run)
        self.assertIn('abc_Scalastyle_compile_scalastyle will read from local artifact cache',
            pants_run.stdout_data)
        # Make sure we are *only* reading from the cache and not also writing,
        # implying there was a cache hit.
        self.assertNotIn('abc_Scalastyle_compile_scalastyle will write to local artifact cache',
            pants_run.stdout_data)
Example #8
  def test_pantsd_lifecycle_non_invalidation_on_config_string(self):
    with temporary_dir() as dist_dir_root, temporary_dir() as config_dir:
      config_files = [
        os.path.abspath(os.path.join(config_dir, 'pants.ini.{}'.format(i))) for i in range(2)
      ]
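      # Both files receive identical content below; only their paths differ, so
      # switching between them should not invalidate (restart) the pantsd daemon.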
      for config_file in config_files:
        print('writing {}'.format(config_file))
        with open(config_file, 'wb') as fh:
          fh.write('[GLOBAL]\npants_distdir: {}\n'.format(os.path.join(dist_dir_root, 'v1')))

      invalidating_config = os.path.join(config_dir, 'pants.ini.invalidates')
      with open(invalidating_config, 'wb') as fh:
        fh.write('[GLOBAL]\npants_distdir: {}\n'.format(os.path.join(dist_dir_root, 'v2')))

      with self.pantsd_successful_run_context() as (pantsd_run, checker, _, _):
        variants = [['--pants-config-files={}'.format(f), 'help'] for f in config_files]
        pantsd_pid = None
        for cmd in itertools.chain(*itertools.repeat(variants, 2)):
          pantsd_run(cmd)
          if not pantsd_pid:
            pantsd_pid = checker.assert_started()
          else:
            checker.assert_running()

        pantsd_run(['--pants-config-files={}'.format(invalidating_config), 'help'])
        self.assertNotEqual(pantsd_pid, checker.assert_started())
Example #9
  def test_compile_antlr_cached(self):
    # Use the same temporary workdir because the generated target's name includes the workdir.
    # Use the same artifact_cache dir to share artifacts across two runs.
    with temporary_dir(root_dir=self.workdir_root()) as tmp_workdir:
      with temporary_dir(root_dir=self.workdir_root()) as artifact_cache:
        # Note that this only works as a test with clean-all because AntlrGen does not use
        # the artifact cache, just the local build invalidator.  As such, the clean-all
        # actually forces it to re-gen _unlike_ the jvm compile tasks.  If AntlrGen starts
        # using the artifact cache this test will pass even if the generated synthetic target
        # was uncacheable due to changing fingerprints, because the .g files have not changed
        # between runs and AntlrGen will not re-gen.
        compile_antlr_args = [
          'clean-all',
          'compile',
          "--cache-gen-antlr-write-to=['{}']".format(artifact_cache),
          "--cache-gen-antlr-read-from=['{}']".format(artifact_cache),
          'examples/src/antlr/org/pantsbuild/example/exp::'
        ]
        # First run should generate and cache artifacts.
        pants_run = self.run_pants_with_workdir(compile_antlr_args, tmp_workdir)
        self.assert_success(pants_run)
        self.assertIn('Caching artifacts for 2 targets.', pants_run.stdout_data)

        # Sleep 1 second to ensure the timestamps in the sources generated by antlr are different.
        time.sleep(1)

        # Second run should use the cached artifacts (even with clean-all).
        pants_run = self.run_pants_with_workdir(compile_antlr_args, tmp_workdir)
        self.assert_success(pants_run)
        self.assertIn('Using cached artifacts for 2 targets.', pants_run.stdout_data)
Example #10
  def test_support_url_multi(self):
    """Tests to make sure existing base urls function as expected."""

    bootstrap_dir = '/tmp'

    with temporary_dir() as invalid_local_files, temporary_dir() as valid_local_files:
      binary_util = self._gen_binary_util(
        baseurls=[
          'BLATANTLY INVALID URL',
          'https://dl.bintray.com/pantsbuild/bin/reasonably-invalid-url',
          invalid_local_files,
          valid_local_files,
          'https://dl.bintray.com/pantsbuild/bin/another-invalid-url',
        ],
        bootstrap_dir=bootstrap_dir)

      binary_request = binary_util._make_deprecated_binary_request(
        supportdir='bin/protobuf',
        version='2.4.1',
        name='protoc')
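      # select() should try the baseurls in order, tolerating the invalid entries
      # and succeeding on the first one that can actually serve the binary (the
      # valid local dir populated below).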

      binary_path = binary_request.get_download_path(binary_util._host_platform())
      contents = b'proof'
      with safe_open(os.path.join(valid_local_files, binary_path), 'wb') as fp:
        fp.write(contents)

      binary_path_abs = os.path.join(bootstrap_dir, binary_path)

      self.assertEqual(os.path.realpath(binary_path_abs),
                       os.path.realpath(binary_util.select(binary_request)))

      self.assertEqual(contents, self._read_file(binary_path_abs))
Example #11
  def _do_test_caching(self, *compiles):
    """Tests that the given compiles within the same workspace produce the given artifact counts."""
    with temporary_dir() as cache_dir, \
        self.temporary_workdir() as workdir, \
        temporary_dir(root_dir=get_buildroot()) as src_dir:

      def complete_config(config):
        # Clone the input config and add cache settings.
        cache_settings = {'write_to': [cache_dir], 'read_from': [cache_dir]}
        return dict(config.items() + [('cache.compile.zinc', cache_settings)])

      buildfile = os.path.join(src_dir, 'BUILD')
      spec = os.path.join(src_dir, ':cachetest')
      artifact_dir = os.path.join(cache_dir,
                                  ZincCompile.stable_name(),
                                  '{}.cachetest'.format(os.path.basename(src_dir)))

      for c in compiles:
        # Clear the src directory and recreate the files.
        safe_mkdir(src_dir, clean=True)
        self.create_file(buildfile,
                        """java_library(name='cachetest', sources=rglobs('*.java', '*.scala'))""")
        for name, content in c.srcfiles.items():
          self.create_file(os.path.join(src_dir, name), content)

        # Compile, and confirm that we have the right count of artifacts.
        self.run_compile(spec, complete_config(c.config), workdir)
        self.assertEqual(c.artifact_count, len(os.listdir(artifact_dir)))
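  # A hypothetical usage sketch (assumes each compile is passed as an object
  # exposing `srcfiles`, `config` and `artifact_count`, e.g. a namedtuple; the
  # real call sites may differ):
  #
  #   Compile = namedtuple('Compile', ['srcfiles', 'config', 'artifact_count'])
  #   self._do_test_caching(
  #     Compile(srcfiles={'A.java': 'class A {}'}, config={}, artifact_count=1),
  #     Compile(srcfiles={'A.java': 'class A {} class B {}'}, config={}, artifact_count=2))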
Example #12
    def test_checkstyle_cached(self):
        with temporary_dir(root_dir=self.workdir_root()) as cache:
            with temporary_dir(root_dir=self.workdir_root()) as workdir:
                args = [
                    "clean-all",
                    "compile.checkstyle",
                    "--cache-write-to=['{}']".format(cache),
                    "--cache-read-from=['{}']".format(cache),
                    "examples/tests/java/org/pantsbuild/example/hello/greet",
                    "-ldebug",
                ]

                pants_run = self.run_pants_with_workdir(args, workdir)
                self.assert_success(pants_run)
                self.assertIn(
                    "abc_Checkstyle_compile_checkstyle will write to local artifact cache", pants_run.stdout_data
                )

                pants_run = self.run_pants_with_workdir(args, workdir)
                self.assert_success(pants_run)
                self.assertIn(
                    "abc_Checkstyle_compile_checkstyle will read from local artifact cache", pants_run.stdout_data
                )
                # Make sure we are *only* reading from the cache and not also writing,
                # implying there was a cache hit.
                self.assertNotIn(
                    "abc_Checkstyle_compile_checkstyle will write to local artifact cache", pants_run.stdout_data
                )
Example #13
    def test_config_invalidates_targets(self, cache_args):
        with temporary_dir(root_dir=self.workdir_root()) as workdir:
            with temporary_dir(root_dir=self.workdir_root()) as tmp:
                configs = [
                    dedent(
                        """
              <module name="TreeWalker">
                <property name="tabWidth" value="2"/>
              </module>"""
                    ),
                    dedent(
                        """
              <module name="TreeWalker">
                <module name="LineLength">
                  <property name="max" value="100"/>
                </module>
              </module>"""
                    ),
                ]

                for config in configs:
                    # Ensure that even though the config files have the same name, their
                    # contents will invalidate the targets.
                    config_file = os.path.join(tmp, "config.xml")
                    self._create_config_file(config_file, config)
                    args = [
                        "clean-all",
                        "compile.checkstyle",
                        cache_args,
                        "examples/src/java/org/pantsbuild/example/hello/simple",
                        "--compile-checkstyle-configuration={}".format(config_file),
                    ]
                    pants_run = self.run_pants_with_workdir(args, workdir)
                    self.assert_success(pants_run)
Example #14
    def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self):
        # Since unforced dependencies resolve to the highest version, including transitive
        # jars, we want to ensure that running java compile with binary-incompatible
        # libraries produces two different artifacts.

        with temporary_dir(root_dir=self.workdir_root()) as workdir, temporary_dir() as cache_dir:
            path_prefix = "testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility"
            dotted_path = path_prefix.replace(os.path.sep, ".")
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(), "{}.jarversionincompatibility".format(dotted_path)
            )
            config = {
                "cache.compile.zinc": {"write_to": [cache_dir], "read_from": [cache_dir]},
                "compile.zinc": {"incremental_caching": True},
            }

            pants_run = self.run_pants_with_workdir(
                ["compile", ("{}:only-15-directly".format(path_prefix))], workdir, config
            )
            self.assert_success(pants_run)

            # One artifact for guava 15
            self.assertEqual(len(os.listdir(artifact_dir)), 1)

            # Rerun for guava 16
            pants_run = self.run_pants_with_workdir(
                ["compile", ("{}:alongside-16".format(path_prefix))], workdir, config
            )
            self.assert_success(pants_run)

            # One artifact for guava 15 and one for guava 16
            self.assertEqual(len(os.listdir(artifact_dir)), 2)
Example #15
  def test_java_compile_reads_resource_mapping(self):
    # Ensure that if an annotation processor produces a resource-mapping,
    # the artifact contains that resource mapping.

    with temporary_dir() as cache_dir:
      artifact_dir = os.path.join(cache_dir, 'JavaCompile',
                                  'testprojects.src.java.com.pants.testproject.annotation.main.main')
      config = {'compile.java': {'write_artifact_caches': [cache_dir]}}

      pants_run = self.run_pants(['compile',
                                  'testprojects/src/java/com/pants/testproject/annotation/main'],
                                 config)
      self.assert_success(pants_run)

      self.assertTrue(os.path.exists(artifact_dir))
      artifacts = os.listdir(artifact_dir)
      self.assertEqual(len(artifacts), 1)

      with temporary_dir() as extract_dir:
        TarArchiver.extract(os.path.join(artifact_dir, artifacts[0]), extract_dir)
        all_files = set()
        for dirpath, dirs, files in safe_walk(extract_dir):
          for name in files:
            path = os.path.join(dirpath, name)
            all_files.add(path)

        report_file_name = os.path.join(extract_dir, 'compile/jvm/java/classes/deprecation_report.txt')
        self.assertIn(report_file_name, all_files)

        annotated_classes = [line.rstrip() for line in open(report_file_name).read().splitlines()]
        self.assertEqual(
          {'com.pants.testproject.annotation.main.Main', 'com.pants.testproject.annotation.main.Main$TestInnerClass'},
          set(annotated_classes))
Example #16
  def test_stale_apt_with_deps(self):
    """An annotation processor with a dependency doesn't pollute other annotation processors.

    At one point, when you added an annotation processor, it stayed configured for all subsequent
    compiles.  Meaning that if that annotation processor had a dep that wasn't on the classpath,
    subsequent compiles would fail with missing symbols required by the stale annotation processor.
    """

    # Demonstrate that the annotation processor is working
    with self.do_test_compile(
        'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep/main',
        expected_files=['Main.class', 'Main_HelloWorld.class', 'Main_HelloWorld.java']) as found:
      gen_file = self.get_only(found, 'Main_HelloWorld.java')
      self.assertTrue(gen_file.endswith(
        'org/pantsbuild/testproject/annotation/processorwithdep/main/Main_HelloWorld.java'),
        msg='{} does not match'.format(gen_file))


    # Try to reproduce second compile that fails with missing symbol
    with temporary_dir(root_dir=self.workdir_root()) as workdir:
      with temporary_dir(root_dir=self.workdir_root()) as cachedir:
        # This annotation processor has a unique external dependency
        self.assert_success(self.run_test_compile(
          workdir,
          cachedir,
          'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep::'))

        # When we run a second compile with annotation processors, make sure the previous annotation
        # processor doesn't stick around to spoil the compile
        self.assert_success(self.run_test_compile(
          workdir,
          cachedir,
          'testprojects/src/java/org/pantsbuild/testproject/annotation/processor::',
          clean_all=False))
Example #17
  @contextmanager
  def do_test_compile(self, target,
      expected_files=None, iterations=2, expect_failure=False, extra_args=None):
    """Runs a configurable number of iterations of compilation for the given target.

    By default, runs twice to shake out errors related to noops.
    """
    with temporary_dir(root_dir=self.workdir_root()) as workdir:
      with temporary_dir(root_dir=self.workdir_root()) as cachedir:
        for i in six.moves.xrange(0, iterations):
          pants_run = self.run_test_compile(workdir, cachedir, target,
                                            clean_all=(i == 0),
                                            extra_args=extra_args)
          if expect_failure:
            self.assert_failure(pants_run)
          else:
            self.assert_success(pants_run)
        found = defaultdict(set)
        workdir_files = []
        if expected_files:
          to_find = set(expected_files)
          for root, _, files in os.walk(workdir):
            for f in files:
              workdir_files.append(os.path.join(root, f))
              if f in to_find:
                found[f].add(os.path.join(root, f))
          to_find.difference_update(found)
          if not expect_failure:
            self.assertEqual(set(), to_find,
                             'Failed to find the following compiled files: {} in {}'.format(
                               to_find, '\n'.join(sorted(workdir_files))))
        yield found
Example #18
  def test_generating_report_from_fetch(self):
    # Ensure the ivy report file gets generated and populated.
    with self.temporary_workdir() as workdir, temporary_dir() as cache_dir:
      config = {'cache': {'write_to': [cache_dir],'read_from': [cache_dir]}}

      def run_pants(command):
        return self.run_pants_with_workdir(command, workdir, config=config)
      with temporary_dir() as ivy_report_dir:
        first_run = run_pants(['resolve', '3rdparty:junit', '--resolve-ivy-report',
                               '--resolve-ivy-outdir={reportdir}'.format(reportdir=ivy_report_dir)])
        self.assert_success(first_run)

        html_report_file, listdir = self._find_html_report(ivy_report_dir)
        self.assertIsNotNone(html_report_file,
                        msg="Couldn't find ivy report in {report_dir} containing files {listdir}"
                        .format(report_dir=ivy_report_dir, listdir=listdir))

        with open(os.path.join(ivy_report_dir, html_report_file)) as report:
          self.assertIn('junit', report.read())

      run_pants(['clean-all'])

      with temporary_dir() as ivy_report_dir:
        fetch_run = run_pants(['resolve', '3rdparty:junit', '--resolve-ivy-report',
                               '--resolve-ivy-outdir={reportdir}'.format(reportdir=ivy_report_dir)])
        self.assert_success(fetch_run)

        # Find the ivy report.
        html_report_file, listdir = self._find_html_report(ivy_report_dir)
        self.assertIsNotNone(html_report_file,
                        msg="Couldn't find ivy report in {report_dir} containing files {listdir}"
                        .format(report_dir=ivy_report_dir, listdir=listdir))

        with open(os.path.join(ivy_report_dir, html_report_file)) as report:
          self.assertIn('junit', report.read())
Example #19
  def test_incremental_caching(self):
    with temporary_dir(root_dir=self.pants_workdir) as jar_dir, \
         temporary_dir(root_dir=self.pants_workdir) as dist_dir:
      self.set_options(pants_distdir=dist_dir)

      target = self.make_target(
        'java/classpath:java_lib',
        target_type=JavaLibrary,
        sources=['com/foo/Bar.java'],
      )
      context = self.context(target_roots=[target])
      runtime_classpath = context.products.get_data('runtime_classpath',
                                                    init_func=ClasspathProducts.init_func(self.pants_workdir))
      task = self.create_task(context)

      target_classpath_output = os.path.join(dist_dir, self.options_scope)

      # Create a classpath entry.
      touch(os.path.join(jar_dir, 'z1.jar'))
      runtime_classpath.add_for_target(target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z1.jar'))])
      task.execute()
      # Check only one symlink and classpath.txt were created.
      self.assertEqual(len(os.listdir(target_classpath_output)), 2)
      self.assertEqual(
        os.path.realpath(os.path.join(target_classpath_output,
                                      sorted(os.listdir(target_classpath_output))[0])),
        os.path.join(jar_dir, 'z1.jar')
      )

      # Remove the classpath entry.
      runtime_classpath.remove_for_target(target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z1.jar'))])

      # Add a different classpath entry
      touch(os.path.join(jar_dir, 'z2.jar'))
      runtime_classpath.add_for_target(target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z2.jar'))])
      task.execute()
      # Check the symlink was updated.
      self.assertEqual(len(os.listdir(target_classpath_output)), 2)
      self.assertEqual(
        os.path.realpath(os.path.join(target_classpath_output,
                                      sorted(os.listdir(target_classpath_output))[0])),
        os.path.join(jar_dir, 'z2.jar')
      )

      # Add a different classpath entry
      touch(os.path.join(jar_dir, 'z3.jar'))
      runtime_classpath.add_for_target(target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z3.jar'))])
      task.execute()
      self.assertEqual(len(os.listdir(target_classpath_output)), 3)

      classpath = sorted(os.listdir(target_classpath_output))[2]
      with safe_open(os.path.join(target_classpath_output, classpath)) as classpath_file:
        # Assert there is only one line ending with a newline
        self.assertListEqual(
          classpath_file.readlines(),
          [
            os.pathsep.join([os.path.join(jar_dir, 'z2.jar'), os.path.join(jar_dir, 'z3.jar')]) + '\n'
          ]
        )
Example #20
  def _test_jar_lib_with_url(self, load_all):
    with self.temporary_workdir() as workdir:
      with self.temporary_sourcedir() as source_dir:
        with temporary_dir() as dist_dir:
          os.makedirs(os.path.join(source_dir, 'src'))
          with open(os.path.join(source_dir, 'src', 'BUILD.one'), 'w+') as f:
            f.write(dedent("""
              jvm_binary(name='synthetic',
                source='Main.java',
              )
            """))
          with open(os.path.join(source_dir, 'src', 'Main.java'), 'w+') as f:
            f.write(dedent("""
              public class Main {
                public static void main(String[] args) {
                  System.out.println("Hello.");
                }
              }
            """))
          with open(os.path.join(source_dir, 'src', 'Foo.java'), 'w+') as f:
            f.write(dedent("""
              public class Foo {
                public static void main(String[] args) {
                  Main.main(args);
                }
              }
            """))

          binary_target = '{}:synthetic'.format(os.path.join(source_dir, 'src'))
          pants_run = self.run_pants_with_workdir(['binary', binary_target,
                                                   '--pants-distdir={}'.format(dist_dir)], workdir)
          self.assert_success(pants_run)
          jar_path = os.path.realpath(os.path.join(dist_dir, 'synthetic.jar'))
          self.assertTrue(os.path.exists(jar_path), 'Synthetic binary was not created!')
          jar_url = 'file://{}'.format(os.path.abspath(jar_path))

          with open(os.path.join(source_dir, 'src', 'BUILD.two'), 'w+') as f:
            f.write(dedent("""
              jar_library(name='lib_with_url',
                jars=[
                  jar(org='org.pantsbuild', name='synthetic-test-jar', rev='1.2.3',
                  url='{jar_url}')
                ],
              )

              java_library(name='src',
                sources=['Foo.java'],
                dependencies=[':lib_with_url'],
              )
            """).format(jar_url=jar_url))

          spec_names = ['lib_with_url', 'src']

          targets = ['{0}:{1}'.format(os.path.join(source_dir, 'src'), name) for name in spec_names]

          with temporary_dir() as ivy_temp_dir:
            extra_args = ['--ivy-cache-dir={}'.format(ivy_temp_dir)]
            self.evaluate_subtask(targets, workdir, load_all, extra_args=extra_args,
                                  expected_jars=['org.pantsbuild:synthetic-test-jar:1.2.3'])
Example #21
    def _do_test_stale_artifacts_rmd_when_cache_used(self, tool_name):
        with temporary_dir() as cache_dir, temporary_dir(root_dir=self.workdir_root()) as workdir, temporary_dir(
            root_dir=get_buildroot()
        ) as src_dir:

            config = {"cache.compile.{}".format(tool_name): {"write_to": [cache_dir], "read_from": [cache_dir]}}

            self.create_file(
                os.path.join(src_dir, "org", "pantsbuild", "cachetest", "A.java"),
                dedent(
                    """package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""
                ),
            )
            self.create_file(
                os.path.join(src_dir, "org", "pantsbuild", "cachetest", "BUILD"),
                dedent(
                    """java_library(name='cachetest',
                                       sources=['A.java']
                          )"""
                ),
            )

            cachetest_spec = os.path.join(os.path.basename(src_dir), "org", "pantsbuild", "cachetest:cachetest")

            # Caches values A.class, Main.class
            self.run_compile(cachetest_spec, config, "isolated", workdir, tool_name)

            self.create_file(
                os.path.join(src_dir, "org", "pantsbuild", "cachetest", "A.java"),
                dedent(
                    """package org.pantsbuild.cachetest;
                            class A {}
                            class NotMain {}"""
                ),
            )
            # Caches values A.class, NotMain.class and leaves them on the filesystem
            self.run_compile(cachetest_spec, config, "isolated", workdir, tool_name)

            self.create_file(
                os.path.join(src_dir, "org", "pantsbuild", "cachetest", "A.java"),
                dedent(
                    """package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""
                ),
            )

            # Should cause NotMain.class to be removed
            self.run_compile(cachetest_spec, config, "isolated", workdir, tool_name)

            cachetest_id = cachetest_spec.replace(":", ".").replace(os.sep, ".")

            class_file_dir = os.path.join(
                workdir, "compile", "jvm", tool_name, "isolated-classes", cachetest_id, "org", "pantsbuild", "cachetest"
            )
            self.assertEqual(sorted(os.listdir(class_file_dir)), sorted(["A.class", "Main.class"]))
Example #22
  def test_stale_artifacts_rmd_when_cache_used_with_zinc(self):
    with temporary_dir() as cache_dir, \
        self.temporary_workdir() as workdir, \
        temporary_dir(root_dir=get_buildroot()) as src_dir:

      config = {
        'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
        'compile.zinc': {'incremental_caching': True },
      }

      srcfile = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'A.java')
      buildfile = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'BUILD')

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""))
      self.create_file(buildfile,
                       dedent("""java_library(name='cachetest',
                                       sources=['A.java']
                          )"""))

      cachetest_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild',
                                    'cachetest:cachetest')

      # Caches values A.class, Main.class
      self.run_compile(cachetest_spec, config, workdir)

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                            class A {}
                            class NotMain {}"""))
      # Caches values A.class, NotMain.class and leaves them on the filesystem
      self.run_compile(cachetest_spec, config, workdir)

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""))

      # Should cause NotMain.class to be removed
      self.run_compile(cachetest_spec, config, workdir)

      root = os.path.join(workdir, 'compile', 'jvm', 'zinc')
      # One target.
      self.assertEqual(len(os.listdir(root)), 1)
      target_workdir_root = os.path.join(root, os.listdir(root)[0])
      target_workdirs = os.listdir(target_workdir_root)
      # Two workdirs.
      self.assertEqual(len(target_workdirs), 2)

      def classfiles(d):
        cd = os.path.join(target_workdir_root, d, 'classes', 'org', 'pantsbuild', 'cachetest')
        return sorted(os.listdir(cd))

      # One workdir should contain NotMain, and the other should contain Main.
      self.assertEqual(sorted(classfiles(w) for w in target_workdirs),
                       sorted([['A.class', 'Main.class'], ['A.class', 'NotMain.class']]))
Example #23
  def bundle_and_run(self, target, bundle_name, bundle_jar_name=None, bundle_options=None,
                     args=None,
                     expected_bundle_jar_content=None,
                     expected_bundle_content=None,
                     library_jars_are_symlinks=True):
    """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

    :param target: target name to compile
    :param bundle_name: resulting bundle filename (minus .zip extension)
    :param bundle_jar_name: monolithic jar filename (minus .jar extension), if None will be the
      same as bundle_name
    :param bundle_options: additional options for bundle
    :param args: optional arguments to pass to executable
    :param expected_bundle_content: verify the bundle zip content
    :param expected_bundle_jar_content: verify the bundle jar content
    :param library_jars_are_symlinks: verify library jars are symlinks if True, and actual
      files if False. Defaults to `True` because we always create symlinks for both external
      and internal dependencies; the only exception is when shading is used.
    :return: stdout as a string on success, raises an Exception on error
    """
    bundle_jar_name = bundle_jar_name or bundle_name
    bundle_options = bundle_options or []
    bundle_options = ['bundle.jvm'] + bundle_options + ['--archive=zip', target]
    with self.pants_results(bundle_options) as pants_run:
      self.assert_success(pants_run)

      self.assertTrue(check_symlinks('dist/{bundle_name}-bundle/libs'.format(bundle_name=bundle_name),
                                     library_jars_are_symlinks))
      # TODO(John Sirois): We need a zip here to suck in external library classpath elements
      # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
      # contextmanager that yields its results while the tmpdir workdir is still active and change
      # this test back to using an un-archived bundle.
      with temporary_dir() as workdir:
        ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
        if expected_bundle_content:
          self.assertTrue(contains_exact_files(workdir, expected_bundle_content))
        if expected_bundle_jar_content:
          with temporary_dir() as check_bundle_jar_dir:
            bundle_jar = os.path.join(workdir, '{bundle_jar_name}.jar'
                                      .format(bundle_jar_name=bundle_jar_name))
            ZIP.extract(bundle_jar, check_bundle_jar_dir)
            self.assertTrue(contains_exact_files(check_bundle_jar_dir, expected_bundle_jar_content))

        optional_args = []
        if args:
          optional_args = args
        java_run = subprocess.Popen(['java',
                                     '-jar',
                                     '{bundle_jar_name}.jar'.format(bundle_jar_name=bundle_jar_name)]
                                    + optional_args,
                                    stdout=subprocess.PIPE,
                                    cwd=workdir)

        stdout, _ = java_run.communicate()
      java_returncode = java_run.returncode
      self.assertEqual(java_returncode, 0)
      return stdout
Example #24
    def test_basic_binary(self):
        with temporary_dir() as cache_dir:
            config = {"cache.compile.zinc": {"write_to": [cache_dir]}}

            with temporary_dir(root_dir=self.workdir_root()) as workdir:
                pants_run = self.run_pants_with_workdir(
                    ["compile", "testprojects/src/java/org/pantsbuild/testproject/publish/hello/main:"], workdir, config
                )
                self.assert_success(pants_run)
Example #25
 def test_preferred_jvm_distributions(self):
   with temporary_dir() as strict_jdk_home:
     with temporary_dir() as non_strict_jdk_home:
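       # Prime DistributionLocator's cache directly; the keys are presumably
       # (min_version, max_version, jdk) triples matching the lookups that
       # execute_export_json() performs for 'java6'.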
       strict_cache_key = (Revision(1, 6), Revision(1, 6, 9999), False)
       non_strict_cache_key = (Revision(1, 6), None, False)
       DistributionLocator._CACHE[strict_cache_key] = Distribution(home_path=strict_jdk_home)
       DistributionLocator._CACHE[non_strict_cache_key] = Distribution(home_path=non_strict_jdk_home)
       self.assertEqual({'strict': strict_jdk_home, 'non_strict': non_strict_jdk_home},
                        self.execute_export_json()['preferred_jvm_distributions']['java6'])
Example #26
 def test_in_process(self, strategy):
   with temporary_dir(root_dir=self.workdir_root()) as workdir:
     with temporary_dir(root_dir=self.workdir_root()) as cachedir:
       pants_run = self.run_test_compile(
         workdir, cachedir, 'examples/src/java/org/pantsbuild/example/hello/main', strategy,
         extra_args=['--no-compile-java-use-jmake', '-ldebug'], clean_all=True
       )
       self.assertIn('Attempting to call com.sun.tools.javac.api.JavacTool', pants_run.stdout_data)
       self.assertNotIn('Forking javac', pants_run.stdout_data)
Example #27
 @contextmanager
 def _create_tiny_git_repo(self):
   with temporary_dir() as gitdir,\
        temporary_dir() as worktree:
     # A tiny little fake git repo we will set up. initialize_repo() requires at least one file.
     readme_file = os.path.join(worktree, 'README')
     touch(readme_file)
     # The contextmanager interface is only necessary if an explicit gitdir is not provided.
     with initialize_repo(worktree, gitdir=gitdir) as git:
       yield git, worktree, gitdir
Example #28
  def _do_test_stale_artifacts_rmd_when_cache_used(self, tool_name):
    with temporary_dir() as cache_dir, \
        temporary_dir(root_dir=self.workdir_root()) as workdir, \
        temporary_dir(root_dir=get_buildroot()) as src_dir:

      config = {
        'cache.compile.{}'.format(tool_name): {'write_to': [cache_dir], 'read_from': [cache_dir]},
        'compile.java': {'use_jmake': tool_name == 'java' },
        'compile.zinc': {'incremental_caching': True },
      }

      srcfile = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'A.java')
      buildfile = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'BUILD')

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""))
      self.create_file(buildfile,
                       dedent("""java_library(name='cachetest',
                                       sources=['A.java']
                          )"""))

      cachetest_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild',
                                    'cachetest:cachetest')

      # Caches values A.class, Main.class
      self.run_compile(cachetest_spec, config, workdir, tool_name)

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                            class A {}
                            class NotMain {}"""))
      # Caches values A.class, NotMain.class and leaves them on the filesystem
      self.run_compile(cachetest_spec, config, workdir, tool_name)

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""))

      # Should cause NotMain.class to be removed
      self.run_compile(cachetest_spec, config, workdir, tool_name)

      cachetest_id = cachetest_spec.replace(':', '.').replace(os.sep, '.')

      class_file_dir = os.path.join(workdir,
                                      'compile',
                                      'jvm',
                                      tool_name,
                                      'isolated-classes',
                                      cachetest_id,
                                      'org',
                                      'pantsbuild',
                                      'cachetest',
                                      )
      self.assertEqual(sorted(os.listdir(class_file_dir)), sorted(['A.class', 'Main.class']))
Example #29
 def test_log_level(self, strategy):
   with temporary_dir(root_dir=self.workdir_root()) as workdir:
     with temporary_dir(root_dir=self.workdir_root()) as cachedir:
       target = 'testprojects/src/java/org/pantsbuild/testproject/dummies:compilation_failure_target'
       pants_run = self.run_test_compile(
         workdir, cachedir, target, strategy,
         extra_args=['--no-compile-java-use-jmake', '--no-color'], clean_all=True
       )
       self.assertIn('[warn] sun.security.x509.X500Name', pants_run.stdout_data)
       self.assertIn('[error] System2.out', pants_run.stdout_data)
Example #30
  def test_invalidate_compiles_when_scopes_change(self):
    with temporary_dir(root_dir=get_buildroot()) as workdir_parent:
      workdir = os.path.join(workdir_parent, '.pants.d')
      os.makedirs(workdir)
      with temporary_dir(root_dir=get_buildroot()) as tmp_project:
        with open(os.path.join(tmp_project, 'Foo.java'), 'w') as f:
          f.write('public class Foo {}')
        with open(os.path.join(tmp_project, 'Bar.java'), 'w') as f:
          f.write('public class Bar extends Foo {}')

        def spec(name):
          return '{}:{}'.format(os.path.basename(tmp_project), name)

        def write_build(scope):
          with open(os.path.join(tmp_project, 'BUILD'), 'w') as f:
            f.write(dedent('''
              java_library(name='foo',
                sources=['Foo.java'],
              )
              java_library(name='bar',
                sources=['Bar.java'],
                dependencies=[
                  scoped(scope='{scope}', address=':foo'),
                ],
              )
              jvm_binary(name='bin',
                main='Foo',
                dependencies=[':foo'],
              )
            ''').strip().format(scope=scope))
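        # scoped() controls when :foo is on bar's classpath: scope='runtime' removes
        # it at compile time (so Bar fails to compile), while scope='compile' removes
        # it at runtime (so running the binary fails).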

        write_build('')
        self.assert_success(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'compile', spec('bar'),
        ], workdir=workdir), msg='Normal build from a clean cache failed. Something may be wrong '
                                 'with the test setup.')

        write_build('runtime')
        self.assert_failure(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'compile', spec('bar'),
        ], workdir=workdir), msg='Build from a dirty cache with the dependency on :foo scoped to '
                                 'runtime passed, when it should have had a compile failure. The '
                                 'cache may not have been invalidated.')

        write_build('compile')
        self.assert_success(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'compile', spec('bar'),
        ], workdir=workdir), msg='Build from a dirty cache with the scope changed to compile '
                                 'failed. The cache may not have been invalidated.')

        write_build('compile')
        self.assert_failure(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'run', spec('bin'),
        ], workdir=workdir), msg='Attempt to run binary with the dependency on :foo scoped to '
                                 'compile passed. This should have caused a runtime failure.')
Example #31
    def _test_jar_lib_with_url(self, load_all):
        with self.temporary_workdir() as workdir:
            with self.temporary_sourcedir() as source_dir:
                with temporary_dir() as dist_dir:
                    os.makedirs(os.path.join(source_dir, 'src'))
                    with open(os.path.join(source_dir, 'src', 'BUILD.one'),
                              'w+') as f:
                        f.write(
                            dedent("""
              jvm_binary(name='synthetic',
                source='Main.java',
              )
            """))
                    with open(os.path.join(source_dir, 'src', 'Main.java'),
                              'w+') as f:
                        f.write(
                            dedent("""
              public class Main {
                public static void main(String[] args) {
                  System.out.println("Hello.");
                }
              }
            """))
                    with open(os.path.join(source_dir, 'src', 'Foo.java'),
                              'w+') as f:
                        f.write(
                            dedent("""
              public class Foo {
                public static void main(String[] args) {
                  Main.main(args);
                }
              }
            """))

                    binary_target = '{}:synthetic'.format(
                        os.path.join(source_dir, 'src'))
                    pants_run = self.run_pants_with_workdir([
                        'binary', binary_target,
                        '--pants-distdir={}'.format(dist_dir)
                    ], workdir)
                    self.assert_success(pants_run)
                    jar_path = os.path.realpath(
                        os.path.join(dist_dir, 'synthetic.jar'))
                    self.assertTrue(os.path.exists(jar_path),
                                    'Synthetic binary was not created!')
                    jar_url = 'file://{}'.format(os.path.abspath(jar_path))

                    with open(os.path.join(source_dir, 'src', 'BUILD.two'),
                              'w+') as f:
                        f.write(
                            dedent("""
              jar_library(name='lib_with_url',
                jars=[
                  jar(org='org.pantsbuild', name='synthetic-test-jar', rev='1.2.3',
                  url='{jar_url}')
                ],
              )

              java_library(name='src',
                sources=['Foo.java'],
                dependencies=[':lib_with_url'],
              )
            """).format(jar_url=jar_url))

                    spec_names = ['lib_with_url', 'src']

                    targets = [
                        '{0}:{1}'.format(os.path.join(source_dir, 'src'), name)
                        for name in spec_names
                    ]

                    with temporary_dir() as ivy_temp_dir:
                        extra_args = [
                            '--ivy-cache-dir={}'.format(ivy_temp_dir)
                        ]
                        self.evaluate_subtask(
                            targets,
                            workdir,
                            load_all,
                            extra_args=extra_args,
                            expected_jars=[
                                'org.pantsbuild:synthetic-test-jar:1.2.3'
                            ])
Example #32
    def test_reset_log_location(self):
        sink = self._gen_sink_subclass()

        with temporary_dir() as tmpdir:
            sink.reset_log_location(tmpdir)
            self.assertEqual(tmpdir, sink._log_dir)
Example #33
 def test_rm_rf_nonexistent(self, file_name="./non_existent_file") -> None:
     with temporary_dir() as td, pushd(td):
         rm_rf(file_name)
Example #34
 def test_rm_rf_file(self, file_name="./foo") -> None:
     with temporary_dir() as td, pushd(td):
         touch(file_name)
         self.assertTrue(os.path.isfile(file_name))
         rm_rf(file_name)
         self.assertFalse(os.path.exists(file_name))
Example #35
 @contextmanager
 def _temp_cache_dir(self):
     with temporary_dir() as cache_dir:
         self.set_options_for_scope('cache.{}'.format(self.options_scope),
                                    read_from=[cache_dir],
                                    write_to=[cache_dir])
         yield
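 # Hypothetical usage sketch (assumes a TaskTestBase-style harness; the exact
 # helper API may differ):
 #
 #   with self._temp_cache_dir():
 #     self.create_task(self.context()).execute()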
Example #36
    def test_permissions(self) -> None:
        with temporary_file(permissions=0o700) as f:
            self.assertEqual(0o700, os.stat(f.name)[0] & 0o777)

        with temporary_dir(permissions=0o644) as path:
            self.assertEqual(0o644, os.stat(path)[0] & 0o777)
Example #37
 def test_rm_rf_dir(self, dir_name="./bar") -> None:
     with temporary_dir() as td, pushd(td):
         safe_mkdir(dir_name)
         self.assertTrue(os.path.isdir(dir_name))
         rm_rf(dir_name)
         self.assertFalse(os.path.exists(dir_name))
Example #38
    def test_hermetic_binary_cache_with_dependencies(self):
        file_abs_path = os.path.join(
            get_buildroot(),
            'examples/src/scala/org/pantsbuild/example/hello/exe/Exe.scala')

        with temporary_dir() as cache_dir:
            config = {
                'cache.compile.rsc': {
                    'write_to': [cache_dir],
                    'read_from': [cache_dir]
                },
                'compile.rsc': {
                    'execution_strategy': 'hermetic',
                    'use_classpath_jars': False,
                    'incremental': False,
                }
            }

            with self.temporary_workdir() as workdir:
                pants_run = self.run_pants_with_workdir(
                    [
                        '-q',
                        'run',
                        'examples/src/scala/org/pantsbuild/example/hello/exe',
                    ],
                    workdir,
                    config,
                )
                self.assert_success(pants_run)
                self.assertIn(
                    'Num args passed: 0. Stand by for welcome...\nHello, Resource World!',
                    pants_run.stdout_data,
                )

                compile_dir = os.path.join(workdir, 'compile', 'rsc',
                                           'current')

                for path_suffix in [
                        'examples.src.scala.org.pantsbuild.example.hello.exe.exe/current/zinc/classes/org/pantsbuild/example/hello/exe/Exe.class',
                        'examples.src.scala.org.pantsbuild.example.hello.welcome.welcome/current/zinc/classes/org/pantsbuild/example/hello/welcome/WelcomeEverybody.class',
                ]:
                    path = os.path.join(compile_dir, path_suffix)
                    self.assertTrue(os.path.exists(path),
                                    "Want path {} to exist".format(path))
                with self.with_overwritten_file_content(file_abs_path):

                    new_temp_test = '''package org.pantsbuild.example.hello.exe
                              
                              import java.io.{BufferedReader, InputStreamReader}
                              import org.pantsbuild.example.hello
                              import org.pantsbuild.example.hello.welcome
                              
                              // A simple jvm binary to illustrate Scala BUILD targets
                              
                              object Exe {
                                /** Test that resources are properly namespaced. */
                                def getWorld: String = {
                                  val is =
                                    this.getClass.getClassLoader.getResourceAsStream(
                                      "org/pantsbuild/example/hello/world.txt"
                                    )
                                  try {
                                    new BufferedReader(new InputStreamReader(is)).readLine()
                                  } finally {
                                    is.close()
                                  }
                                }
                              
                                def main(args: Array[String]) {
                                  println("Num args passed: " + args.size + ". Stand by for welcome...")
                                  if (args.size <= 0) {
                                    println("Hello, and welcome to " + getWorld + "!")
                                  } else {
                                    val w = welcome.WelcomeEverybody(args)
                                    w.foreach(s => println(s))
                                  }
                                }
                              }'''

                    with open(file_abs_path, 'w') as f:
                        f.write(new_temp_test)

                    pants_run = self.run_pants_with_workdir(
                        [
                            '-q',
                            'run',
                            'examples/src/scala/org/pantsbuild/example/hello/exe',
                        ],
                        workdir,
                        config,
                    )
                    self.assert_success(pants_run)
                    self.assertIn(
                        'Num args passed: 0. Stand by for welcome...\nHello, and welcome to Resource World!',
                        pants_run.stdout_data,
                    )

                    compile_dir = os.path.join(workdir, 'compile', 'rsc',
                                               'current')

                    for path_suffix in [
                            'examples.src.scala.org.pantsbuild.example.hello.exe.exe/current/zinc/classes/org/pantsbuild/example/hello/exe/Exe.class',
                            'examples.src.scala.org.pantsbuild.example.hello.welcome.welcome/current/zinc/classes/org/pantsbuild/example/hello/welcome/WelcomeEverybody.class',
                    ]:
                        path = os.path.join(compile_dir, path_suffix)
                        self.assertTrue(os.path.exists(path),
                                        "Want path {} to exist".format(path))
Example #39
 def assert_dump_and_read(self, test_content, dump_kwargs, read_kwargs):
     with temporary_dir() as td:
         test_filename = os.path.join(td, "test.out")
         safe_file_dump(test_filename, test_content, **dump_kwargs)
         self.assertEqual(read_file(test_filename, **read_kwargs),
                          test_content)
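Hypothetical calls to the helper above, assuming safe_file_dump accepts a mode kwarg and read_file a binary_mode kwarg (true of some Pants versions; both kwarg names are assumptions here):

# Text round-trip with the default modes.
self.assert_dump_and_read("hello", dump_kwargs={}, read_kwargs={})
# Binary round-trip.
self.assert_dump_and_read(b"hello", dump_kwargs={"mode": "wb"},
                          read_kwargs={"binary_mode": True})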
Example #40
 def tmp_scalastyle_config(self):
     with temporary_dir(root_dir=get_buildroot()) as scalastyle_dir:
         path = os.path.join(scalastyle_dir, "config.xml")
         safe_file_dump(path, """<scalastyle/>""")
         yield f"--scalastyle-config={path}"
Example #41
    def _create_binary(self, binary_tgt, results_dir):
        """Create a .pex file for the specified binary target."""
        # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
        # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
        # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
        # metadata, and so on, which the merging code would have to handle specially).
        interpreter = self.context.products.get_data(PythonInterpreter)
        with temporary_dir() as tmpdir:
            # Create the pex_info for the binary.
            build_properties = PexInfo.make_build_properties()
            if self.get_options().include_run_information:
                run_info_dict = self.context.run_tracker.run_info.get_as_dict()
                build_properties.update(run_info_dict)
            pex_info = binary_tgt.pexinfo.copy()
            pex_info.build_properties = build_properties

            pex_builder = PexBuilderWrapper.Factory.create(
                builder=PEXBuilder(path=tmpdir,
                                   interpreter=interpreter,
                                   pex_info=pex_info,
                                   copy=True),
                log=self.context.log,
                generate_ipex=self._generate_ipex,
            )

            if binary_tgt.shebang:
                self.context.log.info(
                    "Found Python binary target {} with customized shebang, using it: {}"
                    .format(binary_tgt.name, binary_tgt.shebang))
                pex_builder.set_shebang(binary_tgt.shebang)
            else:
                self.context.log.debug(
                    f"No customized shebang found for {binary_tgt.name}")

            # Find which targets provide sources and which specify requirements.
            source_tgts = []
            req_tgts = []
            constraint_tgts = []
            for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
                if has_python_sources(tgt) or has_resources(tgt):
                    source_tgts.append(tgt)
                elif has_python_requirements(tgt):
                    req_tgts.append(tgt)
                if is_python_target(tgt):
                    constraint_tgts.append(tgt)

            # Add interpreter compatibility constraints to pex info. Note that we only add the constraints for the final
            # binary target itself, not its dependencies. The upstream interpreter selection tasks will already validate that
            # there are no compatibility conflicts among the dependencies and target. If the binary target does not have
            # `compatibility` in its BUILD entry, the global --python-setup-interpreter-constraints will be used.
            pex_builder.add_interpreter_constraints_from([binary_tgt])

            # Dump everything into the builder's chroot.
            for tgt in source_tgts:
                pex_builder.add_sources_from(tgt)

            # We need to ensure that we are resolving for only the current platform if we are
            # including local python dist targets that have native extensions.
            self._python_native_code_settings.check_build_for_current_platform_only(
                self.context.targets())
            pex_builder.add_requirement_libs_from(
                req_tgts, platforms=binary_tgt.platforms)

            # Build the .pex file.
            pex_filename = self._get_output_pex_filename(binary_tgt.name)
            pex_path = os.path.join(results_dir, pex_filename)
            pex_builder.build(pex_path)
            return pex_path
Example #42
def test_remove_prefix(rule_runner: RuleRunner) -> None:
    relevant_files = (
        "characters/dark_tower/roland",
        "characters/dark_tower/susannah",
    )
    all_files = (
        "books/dark_tower/gunslinger",
        "characters/altered_carbon/kovacs",
        *relevant_files,
        "index",
    )

    with temporary_dir() as temp_dir:
        safe_file_dump(os.path.join(temp_dir, "index"), "books\ncharacters\n")
        safe_file_dump(
            os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
            "Envoy",
            makedirs=True,
        )

        tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
        safe_file_dump(os.path.join(tower_dir, "roland"),
                       "European Burmese",
                       makedirs=True)
        safe_file_dump(os.path.join(tower_dir, "susannah"),
                       "Not sure actually",
                       makedirs=True)

        safe_file_dump(
            os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
            "1982",
            makedirs=True,
        )

        snapshot, snapshot_with_extra_files = rule_runner.scheduler.capture_snapshots(
            [
                PathGlobsAndRoot(PathGlobs(["characters/dark_tower/*"]),
                                 temp_dir),
                PathGlobsAndRoot(PathGlobs(["**"]), temp_dir),
            ])

        # Check that we got the full snapshots that we expect
        assert snapshot.files == relevant_files
        assert snapshot_with_extra_files.files == all_files

        # Strip empty prefix:
        zero_prefix_stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "")])
        assert snapshot.digest == zero_prefix_stripped_digest

        # Strip a non-empty prefix shared by all files:
        stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "characters/dark_tower")])
        assert stripped_digest == Digest(
            fingerprint="71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c",
            serialized_bytes_length=162,
        )

        expected_snapshot = assert_single_element(
            rule_runner.scheduler.capture_snapshots(
                [PathGlobsAndRoot(PathGlobs(["*"]), tower_dir)]))
        assert expected_snapshot.files == ("roland", "susannah")
        assert stripped_digest == expected_snapshot.digest

        # Try to strip a prefix which isn't shared by all files:
        with pytest.raises(Exception) as exc:
            rule_runner.request(
                Digest,
                [
                    RemovePrefix(snapshot_with_extra_files.digest,
                                 "characters/dark_tower")
                ],
            )
        assert (
            "Cannot strip prefix characters/dark_tower from root directory (Digest "
            "with hash Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>)"
            " - root directory contained non-matching directory named: books and file named: index"
        ) in str(exc.value)
Example #43
    def test_compile_stale_platform_settings(self):
        # Tests that targets are properly re-compiled when their source/target levels change.
        with temporary_dir(root_dir=os.path.abspath('.')) as tmpdir:
            with open(os.path.join(tmpdir, 'BUILD'), 'w') as f:
                f.write(
                    dedent('''
        java_library(name='diamond',
          sources=['Diamond.java'],
        )
        '''))
            with open(os.path.join(tmpdir, 'Diamond.java'), 'w') as f:
                f.write(
                    dedent('''
          public class Diamond<T> {
            public static void main(String[] args) {
              // The diamond operator <> for generics was introduced in jdk7.
              Diamond<String> shinyDiamond = new Diamond<>();
            }
          }
        '''))
            platforms = {
                'java6': {
                    'source': '6'
                },
                'java7': {
                    'source': '7'
                },
            }

            # We run these all in the same working directory, because we're testing caching behavior.
            with self.temporary_workdir() as workdir:

                def compile_diamond(platform):
                    return self.run_pants_with_workdir(
                        [
                            '--jvm-platform-platforms={}'.format(platforms),
                            '--jvm-platform-default-platform={}'.format(platform),
                            '-ldebug',
                            'compile',
                        ] + self.get_pants_compile_args() +
                        ['{}:diamond'.format(tmpdir)],
                        workdir=workdir)

                # We shouldn't be able to compile this with -source=6.
                self.assert_failure(
                    compile_diamond('java6'),
                    'Diamond.java was compiled successfully with '
                    'java6 starting from a fresh workdir, but '
                    'that should not be possible.')

                # We should be able to compile this with -source=7.
                self.assert_success(
                    compile_diamond('java7'),
                    'Diamond.java failed to compile with java7, '
                    'which should succeed.')

                # We still shouldn't be able to compile this with -source=6. If the below passes, it means
                #  that we saved the cached run from java7 and didn't recompile, which is an error.
                self.assert_failure(
                    compile_diamond('java6'),
                    'Diamond.java erroneously compiled in java6,'
                    ' which means that either compilation was'
                    ' skipped due to bad fingerprinting/caching,'
                    ' or the compiler failed to clean up the'
                    ' previous class from the java7'
                    ' compile.')
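A sketch of the command line that compile_diamond builds, with a hypothetical target spec; the dict-valued flag is rendered with str.format, which the options system appears to parse back as a Python-literal dict:

platforms = {'java6': {'source': '6'}, 'java7': {'source': '7'}}
args = [
    '--jvm-platform-platforms={}'.format(platforms),
    '--jvm-platform-default-platform=java7',
    '-ldebug',
    'compile',
    'path/to/tmpdir:diamond',  # hypothetical spec
]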
Example #44
 def _temp_task_cache_dir(self):
     with temporary_dir() as cache_dir:
         self.set_options_for_scope(
             f"cache.{self.options_scope}", read_from=[cache_dir], write_to=[cache_dir]
         )
         yield
Example #45
def create_isolated_git_repo():
    # Isolated Git Repo Structure:
    # worktree
    # |--README
    # |--pants.ini
    # |--3rdparty
    #    |--BUILD
    # |--src
    #    |--resources
    #       |--org/pantsbuild/resourceonly
    #          |--BUILD
    #          |--README.md
    #    |--java
    #       |--org/pantsbuild/helloworld
    #          |--BUILD
    #          |--helloworld.java
    #    |--python
    #       |--python_targets
    #          |--BUILD
    #          |--test_binary.py
    #          |--test_library.py
    #          |--test_unclaimed_src.py
    #       |--sources
    #          |--BUILD
    #          |--sources.py
    #          |--sources.txt
    # |--tests
    #    |--scala
    #       |--org/pantsbuild/cp-directories
    #          |--BUILD
    #          |--ClasspathDirectoriesSpec.scala
    with temporary_dir(root_dir=get_buildroot()) as worktree:

        def create_file(path, content):
            """Creates a file in the isolated git repo."""
            return create_file_in(worktree, path, content)

        def copy_into(path, to_path=None):
            """Copies a file from the real git repo into the isolated git repo."""
            write_path = os.path.join(worktree, to_path or path)
            if os.path.isfile(path):
                safe_mkdir(os.path.dirname(write_path))
                shutil.copyfile(path, write_path)
            else:
                shutil.copytree(path, write_path)
            return write_path

        create_file('README', 'N.B. This is just a test tree.')
        create_file(
            'pants.ini', """
      [GLOBAL]
      pythonpath: [
          "{0}/contrib/go/src/python",
          "{0}/pants-plugins/src/python"
        ]
      backend_packages: +[
          "internal_backend.utilities",
          "pants.contrib.go"
        ]
      """.format(get_buildroot()))
        copy_into('.gitignore')

        with initialize_repo(worktree=worktree,
                             gitdir=os.path.join(worktree, '.git')) as git:

            def add_to_git(commit_msg, *files):
                git.add(*files)
                git.commit(commit_msg)

            add_to_git(
                'a go target with default sources',
                create_file('src/go/tester/BUILD', 'go_binary()'),
                create_file(
                    'src/go/tester/main.go', """
          package main
          import "fmt"
          func main() {
            fmt.Println("hello, world")
          }
          """))

            add_to_git(
                'resource file',
                create_file(
                    'src/resources/org/pantsbuild/resourceonly/BUILD', """
          resources(
            name='resource',
            sources=['README.md']
          )
          """),
                create_file(
                    'src/resources/org/pantsbuild/resourceonly/README.md',
                    'Just a resource.'))

            add_to_git(
                'hello world java program with a dependency on a resource file',
                create_file(
                    'src/java/org/pantsbuild/helloworld/BUILD', """
          jvm_binary(
            dependencies=[
              'src/resources/org/pantsbuild/resourceonly:resource',
            ],
            source='helloworld.java',
            main='org.pantsbuild.helloworld.HelloWorld',
          )
          """),
                create_file(
                    'src/java/org/pantsbuild/helloworld/helloworld.java', """
          package org.pantsbuild.helloworld;

          class HelloWorld {
            public static void main(String[] args) {
              System.out.println("Hello, World!\\n");
            }
          }
          """))

            add_to_git(
                'scala test target',
                copy_into(
                    'testprojects/tests/scala/org/pantsbuild/testproject/cp-directories',
                    'tests/scala/org/pantsbuild/cp-directories'))

            add_to_git(
                'python targets',
                copy_into('testprojects/src/python/python_targets',
                          'src/python/python_targets'))

            add_to_git(
                'a python_library with resources=["filename"]',
                copy_into('testprojects/src/python/sources',
                          'src/python/sources'))

            add_to_git('3rdparty/BUILD', copy_into('3rdparty/BUILD'))

            with environment_as(PANTS_BUILDROOT_OVERRIDE=worktree):
                yield worktree
Example #46
 def setup_sandbox(self):
     with temporary_dir('.') as sourcedir:
         with self.temporary_workdir() as workdir:
             javadir = os.path.join(sourcedir, 'src', 'java')
             os.makedirs(javadir)
             yield self.JavaSandbox(self, workdir, javadir)
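A hedged usage sketch of the fixture above, assuming setup_sandbox is wrapped with contextlib.contextmanager and that JavaSandbox exposes the workdir and javadir it is constructed with:

with self.setup_sandbox() as sandbox:
    # Drop a source file into the sandbox's java source root.
    with open(os.path.join(sandbox.javadir, 'Hello.java'), 'w') as f:
        f.write('public class Hello {}')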
Example #47
 def test_relative_symlink_bad_link(self) -> None:
     with temporary_dir() as tmpdir_1:  # link is not absolute
         source = os.path.join(tmpdir_1, "source")
         link = os.path.join("foo", "bar")
         with pytest.raises(ValueError, match=r"Path for link.*absolute"):
             relative_symlink(source, link)
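For contrast, a sketch of the well-formed call, with both paths absolute so no ValueError is raised (the import path for relative_symlink is assumed to be pants.util.dirutil):

import os

from pants.util.contextutil import temporary_dir
from pants.util.dirutil import relative_symlink

with temporary_dir() as tmpdir:
    source = os.path.join(tmpdir, "source")
    link = os.path.join(tmpdir, "link")
    # The link is created even though the source does not exist yet;
    # it simply dangles until the source appears.
    relative_symlink(source, link)
    assert os.path.islink(link)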
Example #48
    def test_distinguish_goals_from_specs(self) -> None:
        self.assert_valid_split(
            './pants compile test foo::',
            expected_goals=['compile', 'test'],
            expected_scope_to_flags={
                '': [],
                'compile': [],
                'test': []
            },
            expected_specs=['foo::'],
        )
        self.assert_valid_split(
            './pants compile test foo::',
            expected_goals=['compile', 'test'],
            expected_scope_to_flags={
                '': [],
                'compile': [],
                'test': []
            },
            expected_specs=['foo::'],
        )
        self.assert_valid_split(
            './pants compile test:test',
            expected_goals=['compile'],
            expected_scope_to_flags={
                '': [],
                'compile': []
            },
            expected_specs=['test:test'],
        )

        assert_test_goal_split = partial(
            self.assert_valid_split,
            expected_goals=['test'],
            expected_scope_to_flags={
                '': [],
                'test': []
            },
        )
        assert_test_goal_split('./pants test test:test',
                               expected_specs=['test:test'])
        assert_test_goal_split('./pants test ./test',
                               expected_specs=['./test'])
        assert_test_goal_split('./pants test //test',
                               expected_specs=['//test'])
        assert_test_goal_split('./pants test ./test.txt',
                               expected_specs=['./test.txt'])
        assert_test_goal_split('./pants test test/test.txt',
                               expected_specs=['test/test.txt'])
        assert_test_goal_split('./pants test test/test',
                               expected_specs=['test/test'])
        assert_test_goal_split('./pants test .', expected_specs=['.'])
        assert_test_goal_split('./pants test *', expected_specs=['*'])
        assert_test_goal_split('./pants test test/*.txt',
                               expected_specs=['test/*.txt'])
        assert_test_goal_split('./pants test test/**/*',
                               expected_specs=['test/**/*'])
        assert_test_goal_split('./pants test !', expected_specs=['!'])
        assert_test_goal_split('./pants test !a/b', expected_specs=['!a/b'])

        # An argument that looks like a file, but is a known scope, should be interpreted as a goal.
        self.assert_valid_split(
            './pants test compile.java',
            expected_goals=['test', 'compile'],
            expected_scope_to_flags={
                '': [],
                'test': [],
                'compile.java': []
            },
            expected_specs=[],
        )
        # An argument that looks like a file, and is not a known scope nor exists on the file system,
        # should be interpreted as an unknown goal.
        self.assert_unknown_goal('./pants test compile.haskell',
                                 ['compile.haskell'])
        # An argument that looks like a file, and is not a known scope but _does_ exist on the file
        # system, should be interpreted as a spec.
        with temporary_dir() as tmpdir, pushd(tmpdir):
            Path(tmpdir, 'compile.haskell').touch()
            self.assert_valid_split(
                './pants test compile.haskell',
                expected_goals=['test'],
                expected_scope_to_flags={
                    '': [],
                    'test': []
                },
                expected_specs=['compile.haskell'],
            )
Example #49
 def test_open_zipFalse(self) -> None:
     with temporary_dir() as tempdir:
         with open_zip(os.path.join(tempdir, 'test'), 'w',
                       allowZip64=False) as zf:
             self.assertFalse(zf._allowZip64)  # type: ignore
Example #50
  def test_green_junit_xml_dir(self):
    with temporary_dir() as junit_xml_dir:
      self.run_tests(targets=[self.green], junit_xml_dir=junit_xml_dir)

      self.assert_test_info(junit_xml_dir, ('test_one', 'success'))
Example #51
 def test_open_zipDefault(self) -> None:
     with temporary_dir() as tempdir:
         with open_zip(os.path.join(tempdir, 'test'), 'w') as zf:
             self.assertTrue(zf._allowZip64)  # type: ignore
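The open_zip tests above only poke at the allowZip64 flag; a minimal round-trip sketch, assuming open_zip forwards its arguments to zipfile.ZipFile:

import os

from pants.util.contextutil import open_zip, temporary_dir

with temporary_dir() as tmpdir:
    path = os.path.join(tmpdir, 'test.zip')
    with open_zip(path, 'w') as zf:
        zf.writestr('hello.txt', 'hello')
    with open_zip(path) as zf:
        assert zf.namelist() == ['hello.txt']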
Example #52
 def provide_chroot(existing):
   if existing:
     yield existing, False
   else:
     with temporary_dir() as new_chroot:
       yield new_chroot, True
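A hypothetical caller of the helper above, assuming provide_chroot is wrapped with contextlib.contextmanager; the boolean tells the caller whether the chroot was freshly created (and will therefore be deleted on exit by temporary_dir):

with provide_chroot(existing=None) as (chroot, is_new):
    # With no existing chroot passed in, we get a fresh path and True.
    print(chroot, is_new)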
Example #53
 def test_relative_symlink_same_paths(self) -> None:
     with temporary_dir() as tmpdir_1:  # source is link
         source = os.path.join(tmpdir_1, "source")
         with self.assertRaisesRegex(
                 ValueError, r"Path for link is identical to source"):
             relative_symlink(source, source)
Example #54
  def test_strip_prefix(self):
    # Set up files:

    relevant_files = (
      'characters/dark_tower/roland',
      'characters/dark_tower/susannah',
    )
    all_files = (
      'books/dark_tower/gunslinger',
      'characters/altered_carbon/kovacs',
    ) + relevant_files + (
      'index',
    )

    with temporary_dir() as temp_dir:
      safe_file_dump(os.path.join(temp_dir, 'index'), 'books\ncharacters\n')
      safe_file_dump(
        os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
        "Envoy",
        makedirs=True,
      )

      tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
      safe_file_dump(os.path.join(tower_dir, "roland"), "European Burmese", makedirs=True)
      safe_file_dump(os.path.join(tower_dir, "susannah"), "Not sure actually", makedirs=True)

      safe_file_dump(
        os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
        "1982",
        makedirs=True,
      )

      snapshot, snapshot_with_extra_files = self.scheduler.capture_snapshots((
        PathGlobsAndRoot(PathGlobs(("characters/dark_tower/*",)), temp_dir),
        PathGlobsAndRoot(PathGlobs(("**",)), temp_dir),
      ))
      # Check that we got the full snapshots that we expect
      self.assertEquals(snapshot.files, relevant_files)
      self.assertEquals(snapshot_with_extra_files.files, all_files)

      # Strip empty prefix:
      zero_prefix_stripped_digest = assert_single_element(self.scheduler.product_request(
        Digest,
        [DirectoryWithPrefixToStrip(snapshot.directory_digest, "")],
      ))
      self.assertEquals(snapshot.directory_digest, zero_prefix_stripped_digest)

      # Strip a non-empty prefix shared by all files:
      stripped_digest = assert_single_element(self.scheduler.product_request(
        Digest,
        [DirectoryWithPrefixToStrip(snapshot.directory_digest, "characters/dark_tower")],
      ))
      self.assertEquals(
        stripped_digest,
        Digest(
          fingerprint='71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c',
          serialized_bytes_length=162,
        )
      )
      expected_snapshot = assert_single_element(self.scheduler.capture_snapshots((
        PathGlobsAndRoot(PathGlobs(("*",)), tower_dir),
      )))
      self.assertEquals(expected_snapshot.files, ('roland', 'susannah'))
      self.assertEquals(stripped_digest, expected_snapshot.directory_digest)

      # Try to strip a prefix which isn't shared by all files:
      with self.assertRaisesWithMessageContaining(Exception, "Cannot strip prefix characters/dark_tower from root directory Digest(Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>, 243) - root directory contained non-matching directory named: books and file named: index"):
        self.scheduler.product_request(
          Digest,
          [DirectoryWithPrefixToStrip(snapshot_with_extra_files.directory_digest, "characters/dark_tower")]
        )
Example #55
    def test_incremental_caching(self):
        with temporary_dir(root_dir=self.pants_workdir) as jar_dir, \
             temporary_dir(root_dir=self.pants_workdir) as dist_dir:
            self.set_options(pants_distdir=dist_dir)

            target = self.make_target(
                'java/classpath:java_lib',
                target_type=JavaLibrary,
                sources=['com/foo/Bar.java'],
            )
            context = self.context(target_roots=[target])
            runtime_classpath = context.products.get_data(
                'runtime_classpath',
                init_func=ClasspathProducts.init_func(self.pants_workdir))
            task = self.create_task(context)

            target_classpath_output = os.path.join(dist_dir,
                                                   self.options_scope)

            # Create a classpath entry.
            touch(os.path.join(jar_dir, 'z1.jar'))
            runtime_classpath.add_for_target(
                target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z1.jar'))])
            task.execute()
            # Check only one symlink and classpath.txt were created.
            self.assertEqual(len(os.listdir(target_classpath_output)), 2)
            self.assertEqual(
                os.path.realpath(
                    os.path.join(
                        target_classpath_output,
                        sorted(os.listdir(target_classpath_output))[0])),
                os.path.join(jar_dir, 'z1.jar'))

            # Remove the classpath entry.
            runtime_classpath.remove_for_target(
                target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z1.jar'))])

            # Add a different classpath entry
            touch(os.path.join(jar_dir, 'z2.jar'))
            runtime_classpath.add_for_target(
                target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z2.jar'))])
            task.execute()
            # Check the symlink was updated.
            self.assertEqual(len(os.listdir(target_classpath_output)), 2)
            self.assertEqual(
                os.path.realpath(
                    os.path.join(
                        target_classpath_output,
                        sorted(os.listdir(target_classpath_output))[0])),
                os.path.join(jar_dir, 'z2.jar'))

            # Add a different classpath entry
            touch(os.path.join(jar_dir, 'z3.jar'))
            runtime_classpath.add_for_target(
                target, [(self.DEFAULT_CONF, os.path.join(jar_dir, 'z3.jar'))])
            task.execute()
            self.assertEqual(len(os.listdir(target_classpath_output)), 3)

            classpath = sorted(os.listdir(target_classpath_output))[2]
            with safe_open(os.path.join(target_classpath_output,
                                        classpath)) as classpath_file:
                # Assert the file contains exactly one line, terminated by a newline.
                self.assertListEqual(classpath_file.readlines(), [
                    os.pathsep.join([
                        os.path.join(jar_dir, 'z2.jar'),
                        os.path.join(jar_dir, 'z3.jar')
                    ]) + '\n'
                ])
Example #56
 def temporary_cachedir(self):
   return temporary_dir(suffix='__CACHEDIR')
Example #57
 def test_junit_run_chroot_cwd_mutex(self):
   with temporary_dir() as chroot:
     self.set_options(chroot=True, cwd=chroot)
     with self.assertRaises(JUnitRun.OptionError):
       self.execute(self.context())
Example #58
 def _create_mock_build_file(self, dirname):
     with temporary_dir() as root:
         os.mkdir(os.path.join(root, dirname))
         touch(os.path.join(root, dirname, 'BUILD'))
         yield BuildFile(FileSystemProjectTree(root),
                         os.path.join(dirname, 'BUILD'))
Example #59
 def temporary_sourcedir(self):
   return temporary_dir(root_dir=get_buildroot())
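A sketch of why the helper roots the directory at get_buildroot(): paths under the buildroot can be referenced by Pants target specs, unlike directories under the system temp root:

with self.temporary_sourcedir() as sourcedir:
    # The temp dir lives inside the repo, so a spec like '<reldir>:target'
    # can address BUILD files written into it.
    assert sourcedir.startswith(get_buildroot())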
Example #60
 def root(self):
   with temporary_dir() as root:
     # Avoid OSX issues where tmp dirs are reported as symlinks.
     real_root = os.path.realpath(root)
     with pushd(real_root):
       yield real_root
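A short illustration of the macOS quirk the comment above guards against: /tmp is a symlink to /private/tmp, so unresolved temp paths can compare unequal to their resolved forms:

import os

from pants.util.contextutil import temporary_dir

with temporary_dir() as root:
    real_root = os.path.realpath(root)
    # On macOS `root` may start with '/tmp' while `real_root` starts with
    # '/private/tmp'; realpath is idempotent, so the resolved form is stable.
    assert os.path.realpath(real_root) == real_root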