Code Example #1
  def _do_test_caching(self, *compiles):
    """Tests that the given compiles within the same workspace produce the given artifact counts."""
    with temporary_dir() as cache_dir, \
        self.temporary_workdir() as workdir, \
        temporary_dir(root_dir=get_buildroot()) as src_dir:

      def complete_config(config):
        # Clone the input config and add cache settings.
        cache_settings = {'write_to': [cache_dir], 'read_from': [cache_dir]}
        return dict(config.items() + [('cache.compile.zinc', cache_settings)])

      buildfile = os.path.join(src_dir, 'BUILD')
      spec = os.path.join(src_dir, ':cachetest')
      artifact_dir = os.path.join(cache_dir,
                                  ZincCompile.stable_name(),
                                  '{}.cachetest'.format(os.path.basename(src_dir)))

      for c in compiles:
        # Clear the src directory and recreate the files.
        safe_mkdir(src_dir, clean=True)
        self.create_file(buildfile,
                        """java_library(name='cachetest', sources=rglobs('*.java', '*.scala'))""")
        for name, content in c.srcfiles.items():
          self.create_file(os.path.join(src_dir, name), content)

        # Compile, and confirm that we have the right count of artifacts.
        self.run_compile(spec, complete_config(c.config), workdir)
        self.assertEquals(c.artifact_count, len(os.listdir(artifact_dir)))
Code Example #2
    def execute_export(self, *specs, **options_overrides):
        options = {
            ScalaPlatform.options_scope: {"version": "custom"},
            JvmResolveSubsystem.options_scope: {"resolver": "coursier"},
            JvmPlatform.options_scope: {
                "default_platform": "java8",
                "platforms": {"java8": {"source": "1.8", "target": "1.8"}},
            },
        }
        options.update(options_overrides)

        # We are only initializing ZincCompile to access the instance method `calculate_jvm_modulizable_targets`
        ZincCompile.options_scope = "compile.rsc"
        BootstrapJvmTools.options_scope = "bootstrap-jvm-tools"
        context = self.context(
            options=options,
            target_roots=[self.target(spec) for spec in specs],
            for_subsystems=[JvmPlatform],
            for_task_types=[BootstrapJvmTools, ZincCompile],
        )

        context.products.safe_create_data("zinc_args", init_func=lambda: MagicMock())
        self.prep_before_export(context)

        context.products.require_data("jvm_modulizable_targets")
        # This simulates ZincCompile creating the product.
        ZincCompile(context, self.pants_workdir).calculate_jvm_modulizable_targets()

        bootstrap_task = BootstrapJvmTools(context, self.pants_workdir)
        bootstrap_task.execute()
        task = self.create_task(context)
        return list(task.console_output(list(task.context.targets())))
Code Example #3
  def test_java_compile_produces_different_artifact_depending_on_java_version(self):
    # Ensure that running java compile with java 6 and then java 7
    # produces two different artifacts.

    with temporary_dir() as cache_dir:
      artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(),
          'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')
      config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

      pants_run = self.run_pants(self.create_platform_args(6) +
                                 ['compile',
                                  'testprojects/src/java/org/pantsbuild/testproject/unicode/main'],
                                 config)
      self.assert_success(pants_run)

      # One artifact for java 6
      self.assertEqual(len(os.listdir(artifact_dir)), 1)

      # Rerun for java 7
      pants_run = self.run_pants(self.create_platform_args(7) +
                                 ['compile',
                                  'testprojects/src/java/org/pantsbuild/testproject/unicode/main'],
                                 config)
      self.assert_success(pants_run)

      # One artifact for java 6 and one for 7
      self.assertEqual(len(os.listdir(artifact_dir)), 2)
Code Example #4
    def test_java_compile_produces_different_artifact_depending_on_java_version(
            self):
        # Ensure that running java compile with java 6 and then java 7
        # produces two different artifacts.

        with temporary_dir() as cache_dir:
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(),
                'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main'
            )
            config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

            pants_run = self.run_pants(
                self.create_platform_args(6) + [
                    'compile.java',
                    'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
                ], config)
            self.assert_success(pants_run)

            # One artifact for java 6
            self.assertEqual(len(os.listdir(artifact_dir)), 1)

            # Rerun for java 7
            pants_run = self.run_pants(
                self.create_platform_args(7) + [
                    'compile.java',
                    'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
                ], config)
            self.assert_success(pants_run)

            # One artifact for java 6 and one for 7
            self.assertEqual(len(os.listdir(artifact_dir)), 2)
Code Example #5
    def _do_test_caching(self, *compiles):
        """Tests that the given compiles within the same workspace produce the given artifact counts."""
        with temporary_dir() as cache_dir, \
            self.temporary_workdir() as workdir, \
            temporary_dir(root_dir=get_buildroot()) as src_dir:

            def complete_config(config):
                # Clone the input config and add cache settings.
                cache_settings = {
                    'write_to': [cache_dir],
                    'read_from': [cache_dir]
                }
                return dict(config.items() +
                            [('cache.compile.zinc', cache_settings)])

            buildfile = os.path.join(src_dir, 'BUILD')
            spec = os.path.join(src_dir, ':cachetest')
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(),
                '{}.cachetest'.format(os.path.basename(src_dir)))

            for c in compiles:
                # Clear the src directory and recreate the files.
                safe_mkdir(src_dir, clean=True)
                self.create_file(
                    buildfile,
                    """java_library(name='cachetest', sources=rglobs('*.java', '*.scala'))"""
                )
                for name, content in c.srcfiles.items():
                    self.create_file(os.path.join(src_dir, name), content)

                # Compile, and confirm that we have the right count of artifacts.
                self.run_compile(spec, complete_config(c.config), workdir)
                self.assertEquals(c.artifact_count,
                                  len(os.listdir(artifact_dir)))
Code Example #6
File: test_cache_cleanup.py  Project: lclementi/pants
  def test_leave_one(self):
    """Ensure that max-old of 1 removes all but one files"""

    with temporary_dir() as cache_dir:
      artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(),
          'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')

      touch(os.path.join(artifact_dir, 'old_cache_test1'))
      touch(os.path.join(artifact_dir, 'old_cache_test2'))
      touch(os.path.join(artifact_dir, 'old_cache_test3'))
      touch(os.path.join(artifact_dir, 'old_cache_test4'))
      touch(os.path.join(artifact_dir, 'old_cache_test5'))

      config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

      pants_run = self.run_pants(self.create_platform_args(6) +
                                 ['compile.zinc',
                                  'testprojects/src/java/org/pantsbuild/testproject/unicode/main',
                                  '--cache-max-entries-per-target=1'],
                                 config=config)
      self.assert_success(pants_run)

      # One artifact for java 6
      self.assertEqual(len(os.listdir(artifact_dir)), 1)

      # Rerun for java 7
      pants_run = self.run_pants(self.create_platform_args(7) +
                                 ['compile.zinc',
                                  'testprojects/src/java/org/pantsbuild/testproject/unicode/main',
                                  '--cache-max-entries-per-target=1'],
                                 config)
      self.assert_success(pants_run)

      # One artifact for java 7
      self.assertEqual(len(os.listdir(artifact_dir)), 1)
Code Example #7
    def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self):
        # Since unforced dependencies resolve to the highest version, including transitive jars,
        # we want to ensure that running java compile with binary-incompatible libraries
        # produces two different artifacts.

        with temporary_dir(root_dir=self.workdir_root()) as workdir, temporary_dir() as cache_dir:
            path_prefix = "testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility"
            dotted_path = path_prefix.replace(os.path.sep, ".")
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(), "{}.jarversionincompatibility".format(dotted_path)
            )
            config = {
                "cache.compile.zinc": {"write_to": [cache_dir], "read_from": [cache_dir]},
                "compile.zinc": {"incremental_caching": True},
            }

            pants_run = self.run_pants_with_workdir(
                ["compile", ("{}:only-15-directly".format(path_prefix))], workdir, config
            )
            self.assert_success(pants_run)

            # One artifact for guava 15
            self.assertEqual(len(os.listdir(artifact_dir)), 1)

            # Rerun for guava 16
            pants_run = self.run_pants_with_workdir(
                ["compile", ("{}:alongside-16".format(path_prefix))], workdir, config
            )
            self.assert_success(pants_run)

            # One artifact for guava 15 and one for guava 16
            self.assertEqual(len(os.listdir(artifact_dir)), 2)
Code Example #8
    def generate_targets_map(self, targets, runtime_classpath, zinc_args_for_all_targets):
        """Generates a dictionary containing all pertinent information about the target graph.

        The return dictionary is suitable for serialization by json.dumps.
        :param targets: The list of targets to generate the map for.
        :param runtime_classpath: ClasspathProducts containing entries for all the resolved and compiled
          dependencies.
        :param zinc_args_for_all_targets: Map from zinc compiled targets to the args used to compile them.
        """
        all_targets = self._get_all_targets(targets)
        libraries_map = self._resolve_jars_info(all_targets, runtime_classpath)

        targets_map = {}
        resource_target_map = {}

        for t in all_targets:
            for dep in t.dependencies:
                if isinstance(dep, Resources):
                    resource_target_map[dep] = t

        modulizable_targets = self._get_targets_to_make_into_modules(
            resource_target_map, runtime_classpath
        )
        non_modulizable_targets = all_targets.difference(modulizable_targets)

        for t in non_modulizable_targets:
            libraries_map[t.id] = self._make_libraries_entry(
                t, resource_target_map, runtime_classpath
            )

        flat_non_modulizable_deps_for_modulizable_targets: Dict[
            Target, FrozenOrderedSet[Target]
        ] = self._flat_non_modulizable_deps_for_modulizable_targets(modulizable_targets)

        for target in modulizable_targets:
            zinc_args_for_target = zinc_args_for_all_targets.get(target)
            if zinc_args_for_target is None:
                if not ZincCompile.select(target):
                    # Targets that weren't selected by ZincCompile also won't have zinc args.
                    zinc_args_for_target = []
                else:
                    raise TaskError(
                        f"There was an error exporting target {target} - There were no zinc arguments registered for it"
                    )
            info = self._process_target(
                target,
                modulizable_targets,
                resource_target_map,
                runtime_classpath,
                zinc_args_for_target,
                flat_non_modulizable_deps_for_modulizable_targets,
            )
            targets_map[target.address.spec] = info

        graph_info = self.initialize_graph_info()
        graph_info["targets"] = targets_map
        graph_info["libraries"] = libraries_map

        return graph_info
Code Example #9
 def test_java_home_extraction_empty(self):
     result = tuple(
         ZincCompile._get_zinc_arguments(
             JvmPlatformSettings('1.7', '1.7', [])))
     self.assertEquals(
         4,
         len(result),
         msg='_get_zinc_arguments did not correctly handle empty args.')
Code Example #10
  def test_nocache(self):
    with temporary_dir() as cache_dir:
      bad_artifact_dir = os.path.join(cache_dir,
          ZincCompile.stable_name(),
          'testprojects.src.java.org.pantsbuild.testproject.nocache.nocache')
      good_artifact_dir = os.path.join(cache_dir,
          ZincCompile.stable_name(),
          'testprojects.src.java.org.pantsbuild.testproject.nocache.cache_me')
      config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

      pants_run = self.run_pants(['compile',
                                  'testprojects/src/java/org/pantsbuild/testproject/nocache::'],
                                 config)
      self.assert_success(pants_run)

      # The nocache target is labeled with no_cache so it should not be written to the
      # artifact cache.
      self.assertFalse(os.path.exists(bad_artifact_dir))
      # But cache_me should be written.
      self.assertEqual(len(os.listdir(good_artifact_dir)), 1)
Code Example #11
    def test_buildcache_leave_none(self):
        """Ensure that max-old of zero removes all files

    This test should ensure that conditional doesn't change to the simpler test of if max_old since
    we need to handle zero as well.
    """

        with temporary_dir() as cache_dir:
            artifact_dir = os.path.join(
                cache_dir,
                ZincCompile.stable_name(),
                "testprojects.src.java.org.pantsbuild.testproject.unicode.main.main",
            )

            touch(os.path.join(artifact_dir, "old_cache_test1"))
            touch(os.path.join(artifact_dir, "old_cache_test2"))
            touch(os.path.join(artifact_dir, "old_cache_test3"))
            touch(os.path.join(artifact_dir, "old_cache_test4"))
            touch(os.path.join(artifact_dir, "old_cache_test5"))

            config = {"cache.compile.zinc": {"write_to": [cache_dir]}}

            pants_run = self.run_pants(
                self.create_platform_args(6)
                + [
                    "compile.zinc",
                    "testprojects/src/java/org/pantsbuild/testproject/unicode/main",
                    "--cache-max-entries-per-target=0",
                ],
                config=config,
            )
            self.assert_success(pants_run)

            # Cache cleanup disabled for 0

            self.assertEqual(len(os.listdir(artifact_dir)), 6)

            # Rerun for java 7
            pants_run = self.run_pants(
                self.create_platform_args(7)
                + [
                    "compile.zinc",
                    "testprojects/src/java/org/pantsbuild/testproject/unicode/main",
                    "--cache-max-entries-per-target=0",
                ],
                config,
            )
            self.assert_success(pants_run)

            # Cache cleanup disabled for 0
            self.assertEqual(len(os.listdir(artifact_dir)), 7)
Code Example #12
  def test_java_home_extraction(self):
    _, source, _, target, foo, bar, composite, single = tuple(ZincCompile._get_zinc_arguments(
      JvmPlatformSettings('1.7', '1.7', [
        'foo', 'bar', 'foo:$JAVA_HOME/bar:$JAVA_HOME/foobar', '$JAVA_HOME',
      ])
    ))

    self.assertEquals('-C1.7', source)
    self.assertEquals('-C1.7', target)
    self.assertEquals('foo', foo)
    self.assertEquals('bar', bar)
    self.assertNotEqual('$JAVA_HOME', single)
    self.assertNotIn('$JAVA_HOME', composite)
    self.assertEquals('foo:{0}/bar:{0}/foobar'.format(single), composite)
Code Example #13
    def test_java_compile_reads_resource_mapping(self):
        # Ensure that if an annotation processor produces a resource-mapping,
        # the artifact contains that resource mapping.

        with temporary_dir() as cache_dir:
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(),
                'testprojects.src.java.org.pantsbuild.testproject.annotation.main.main'
            )
            config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

            pants_run = self.run_pants([
                'compile',
                'testprojects/src/java/org/pantsbuild/testproject/annotation/main'
            ], config)
            self.assert_success(pants_run)

            self.assertTrue(os.path.exists(artifact_dir))
            artifacts = os.listdir(artifact_dir)
            self.assertEqual(len(artifacts), 1)

            with temporary_dir() as extract_dir:
                TarArchiver.extract(os.path.join(artifact_dir, artifacts[0]),
                                    extract_dir)
                all_files = set()
                for dirpath, dirs, files in safe_walk(extract_dir):
                    for name in files:
                        path = os.path.join(dirpath, name)
                        all_files.add(path)

                # Locate the report file on the classpath.
                report_file_name = 'deprecation_report.txt'
                reports = [
                    f for f in all_files if f.endswith(report_file_name)
                ]
                self.assertEquals(
                    1, len(reports),
                    'Expected exactly one {} file; got: {}'.format(
                        report_file_name, all_files))

                with open(reports[0]) as fp:
                    annotated_classes = [
                        line.rstrip() for line in fp.read().splitlines()
                    ]
                    self.assertEquals(
                        {
                            'org.pantsbuild.testproject.annotation.main.Main',
                            'org.pantsbuild.testproject.annotation.main.Main$TestInnerClass'
                        }, set(annotated_classes))
Code Example #14
    def test_java_compile_reads_resource_mapping(self):
        # Ensure that if an annotation processor produces a resource-mapping,
        # the artifact contains that resource mapping.

        with temporary_dir() as cache_dir:
            artifact_dir = os.path.join(
                cache_dir,
                ZincCompile.stable_name(),
                "testprojects.src.java.org.pantsbuild.testproject.annotation.main.main",
            )
            config = {"cache.compile.zinc": {"write_to": [cache_dir]}}

            pants_run = self.run_pants(
                ["compile", "testprojects/src/java/org/pantsbuild/testproject/annotation/main"], config
            )
            self.assert_success(pants_run)

            self.assertTrue(os.path.exists(artifact_dir))
            artifacts = os.listdir(artifact_dir)
            self.assertEqual(len(artifacts), 1)

            with temporary_dir() as extract_dir:
                TarArchiver.extract(os.path.join(artifact_dir, artifacts[0]), extract_dir)
                all_files = set()
                for dirpath, dirs, files in safe_walk(extract_dir):
                    for name in files:
                        path = os.path.join(dirpath, name)
                        all_files.add(path)

                # Locate the report file on the classpath.
                report_file_name = "deprecation_report.txt"
                reports = [f for f in all_files if f.endswith(report_file_name)]
                self.assertEquals(
                    1, len(reports), "Expected exactly one {} file; got: {}".format(report_file_name, all_files)
                )

                with open(reports[0]) as fp:
                    annotated_classes = [line.rstrip() for line in fp.read().splitlines()]
                    self.assertEquals(
                        {
                            "org.pantsbuild.testproject.annotation.main.Main",
                            "org.pantsbuild.testproject.annotation.main.Main$TestInnerClass",
                        },
                        set(annotated_classes),
                    )
Code Example #15
    def test_buildcache_leave_none(self):
        """Ensure that max-old of zero removes all files

    This test should ensure that conditional doesn't change to the simpler test of if max_old since
    we need to handle zero as well.
    """

        with temporary_dir() as cache_dir:
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(),
                'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main'
            )

            touch(os.path.join(artifact_dir, 'old_cache_test1'))
            touch(os.path.join(artifact_dir, 'old_cache_test2'))
            touch(os.path.join(artifact_dir, 'old_cache_test3'))
            touch(os.path.join(artifact_dir, 'old_cache_test4'))
            touch(os.path.join(artifact_dir, 'old_cache_test5'))

            config = {'cache.compile.zinc': {'write_to': [cache_dir]}}

            pants_run = self.run_pants(self.create_platform_args(6) + [
                'compile.zinc',
                'testprojects/src/java/org/pantsbuild/testproject/unicode/main',
                '--cache-max-entries-per-target=0'
            ],
                                       config=config)
            self.assert_success(pants_run)

            # Cache cleanup disabled for 0

            self.assertEqual(len(os.listdir(artifact_dir)), 6)

            # Rerun for java 7
            pants_run = self.run_pants(
                self.create_platform_args(7) + [
                    'compile.zinc',
                    'testprojects/src/java/org/pantsbuild/testproject/unicode/main',
                    '--cache-max-entries-per-target=0'
                ], config)
            self.assert_success(pants_run)

            # Cache cleanup disabled for 0
            self.assertEqual(len(os.listdir(artifact_dir)), 7)
Code Example #16
    def test_incremental_caching(self):
        """Tests that with --no-incremental-caching, we don't write incremental artifacts."""
        with temporary_dir() as cache_dir, \
            self.temporary_workdir() as workdir, \
            temporary_dir(root_dir=get_buildroot()) as src_dir:

            def config(incremental_caching):
                return {
                    'cache.compile.zinc': {
                        'write_to': [cache_dir],
                        'read_from': [cache_dir]
                    },
                    'compile.zinc': {
                        'incremental_caching': incremental_caching
                    },
                }

            srcfile = os.path.join(src_dir, 'A.java')
            buildfile = os.path.join(src_dir, 'BUILD')
            spec = os.path.join(src_dir, ':cachetest')
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(),
                '{}.cachetest'.format(os.path.basename(src_dir)))

            self.create_file(srcfile, """class A {}""")
            self.create_file(
                buildfile,
                """java_library(name='cachetest', sources=['A.java'])""")

            # Confirm that the result is one cached artifact.
            self.run_compile(spec, config(False), workdir)
            clean_artifacts = os.listdir(artifact_dir)
            self.assertEquals(1, len(clean_artifacts))

            # Modify the file, and confirm that artifacts haven't changed.
            self.create_file(srcfile, """final class A {}""")
            self.run_compile(spec, config(False), workdir)
            self.assertEquals(clean_artifacts, os.listdir(artifact_dir))

            # Modify again, this time with incremental and confirm that we have a second artifact.
            self.create_file(srcfile, """public final class A {}""")
            self.run_compile(spec, config(True), workdir)
            self.assertEquals(2, len(os.listdir(artifact_dir)))
Code Example #17
    def test_java_compile_with_different_resolved_jars_produce_different_artifacts(
            self):
        # Since unforced dependencies resolve to the highest version, including transitive jars,
        # we want to ensure that running java compile with binary-incompatible libraries
        # produces two different artifacts.

        with temporary_dir(root_dir=self.workdir_root()
                           ) as workdir, temporary_dir() as cache_dir:
            path_prefix = 'testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility'
            dotted_path = path_prefix.replace(os.path.sep, '.')
            artifact_dir = os.path.join(
                cache_dir, ZincCompile.stable_name(),
                '{}.jarversionincompatibility'.format(dotted_path))
            config = {
                'cache.compile.zinc': {
                    'write_to': [cache_dir],
                    'read_from': [cache_dir],
                },
                'compile.zinc': {
                    'incremental_caching': True,
                },
            }

            pants_run = self.run_pants_with_workdir(
                ['compile.java',
                 ('{}:only-15-directly'.format(path_prefix))], workdir, config)
            self.assert_success(pants_run)

            # One artifact for guava 15
            self.assertEqual(len(os.listdir(artifact_dir)), 1)

            # Rerun for guava 16
            pants_run = self.run_pants_with_workdir(
                ['compile.java',
                 (u'{}:alongside-16'.format(path_prefix))], workdir, config)
            self.assert_success(pants_run)

            # One artifact for guava 15 and one for guava 16
            self.assertEqual(len(os.listdir(artifact_dir)), 2)
Code Example #18
  def test_incremental_caching(self):
    """Tests that with --no-incremental-caching, we don't write incremental artifacts."""
    with temporary_dir() as cache_dir, \
        temporary_dir(root_dir=self.workdir_root()) as workdir, \
        temporary_dir(root_dir=get_buildroot()) as src_dir:

      tool_name = 'zinc'
      def config(incremental_caching):
        return {
          'cache.compile.{}'.format(tool_name): {'write_to': [cache_dir], 'read_from': [cache_dir]},
          'compile.{}'.format(tool_name): {'incremental_caching': incremental_caching},
        }

      srcfile = os.path.join(src_dir, 'A.java')
      buildfile = os.path.join(src_dir, 'BUILD')
      spec = os.path.join(src_dir, ':cachetest')
      artifact_dir = os.path.join(cache_dir,
                                  ZincCompile.stable_name(),
                                  '{}.cachetest'.format(os.path.basename(src_dir)))

      self.create_file(srcfile, """class A {}""")
      self.create_file(buildfile, """java_library(name='cachetest', sources=['A.java'])""")


      # Confirm that the result is one cached artifact.
      self.run_compile(spec, config(False), workdir, tool_name)
      clean_artifacts = os.listdir(artifact_dir)
      self.assertEquals(1, len(clean_artifacts))

      # Modify the file, and confirm that artifacts haven't changed.
      self.create_file(srcfile, """final class A {}""")
      self.run_compile(spec, config(False), workdir, tool_name)
      self.assertEquals(clean_artifacts, os.listdir(artifact_dir))

      # Modify again, this time with incremental and confirm that we have a second artifact.
      self.create_file(srcfile, """public final class A {}""")
      self.run_compile(spec, config(True), workdir, tool_name)
      self.assertEquals(2, len(os.listdir(artifact_dir)))
Code Example #19
  def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self):
    # Since unforced dependencies resolve to the highest version, including transitive jars,
    # we want to ensure that running java compile with binary-incompatible libraries
    # produces two different artifacts.

    with self.temporary_workdir() as workdir, temporary_dir() as cache_dir:
      path_prefix = 'testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility'
      dotted_path = path_prefix.replace(os.path.sep, '.')
      artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(),
                                  '{}.jarversionincompatibility'.format(dotted_path))
      config = {
          'cache.compile.zinc': {
            'write_to': [cache_dir],
            'read_from': [cache_dir],
          },
          'compile.zinc': {
            'incremental_caching': True,
          },
      }

      pants_run = self.run_pants_with_workdir(['compile',
                                               ('{}:only-15-directly'.format(path_prefix))],
                                              workdir,
                                              config)
      self.assert_success(pants_run)

      # One artifact for guava 15
      self.assertEqual(len(os.listdir(artifact_dir)), 1)

      # Rerun for guava 16
      pants_run = self.run_pants_with_workdir(['compile',
                                               (u'{}:alongside-16'.format(path_prefix))],
                                              workdir,
                                              config)
      self.assert_success(pants_run)

      # One artifact for guava 15 and one for guava 16
      self.assertEqual(len(os.listdir(artifact_dir)), 2)
Code Example #20
 def _get_zinc_arguments(self, settings):
   distribution = JvmCompile._local_jvm_distribution(settings=settings)
   return ZincCompile._format_zinc_arguments(settings, distribution)
Code Example #21
  def test_java_home_extraction_missing_distributions(self):
    # This will need to be bumped if java ever gets to major version one million.
    far_future_version = '999999.1'
    farer_future_version = '999999.2'

    os_name = normalize_os_name(get_os_name())

    @contextmanager
    def fake_distributions(versions):
      """Create a fake JDK for each java version in the input, and yield the list of java_homes.

      :param list versions: List of java version strings.
      """
      fakes = []
      for version in versions:
        fakes.append(distribution(
          executables=[EXE('bin/java', version), EXE('bin/javac', version)],
        ))
      yield [d.__enter__() for d in fakes]
      for d in fakes:
        d.__exit__(None, None, None)

    @contextmanager
    def fake_distribution_locator(*versions):
      """Sets up a fake distribution locator with fake distributions.

      Creates one distribution for each java version passed as an argument, and yields a list of
      paths to the java homes for each distribution.
      """
      with fake_distributions(versions) as paths:
        path_options = {
          DistributionLocator.options_scope: {
            'paths': {
              os_name: paths,
            }
          }
        }
        Subsystem.reset()
        init_subsystem(DistributionLocator, options=path_options)
        yield paths

    # Completely missing a usable distribution.
    with fake_distribution_locator(far_future_version):
      with self.assertRaises(DistributionLocator.Error):
        ZincCompile._get_zinc_arguments(JvmPlatformSettings(
          source_level=farer_future_version,
          target_level=farer_future_version,
          args=['$JAVA_HOME/foo'],
        ))

    # Missing a strict distribution.
    with fake_distribution_locator(farer_future_version) as paths:
      results = ZincCompile._get_zinc_arguments(JvmPlatformSettings(
        source_level=far_future_version,
        target_level=far_future_version,
        args=['$JAVA_HOME/foo', '$JAVA_HOME'],
      ))
      self.assertEquals(paths[0], results[-1])
      self.assertEquals('{}/foo'.format(paths[0]), results[-2])

    # Make sure we pick up the strictest possible distribution.
    with fake_distribution_locator(farer_future_version, far_future_version) as paths:
      farer_path, far_path = paths
      results = ZincCompile._get_zinc_arguments(JvmPlatformSettings(
        source_level=far_future_version,
        target_level=far_future_version,
        args=['$JAVA_HOME/foo', '$JAVA_HOME'],
      ))
      self.assertEquals(far_path, results[-1])
      self.assertEquals('{}/foo'.format(far_path), results[-2])

    # Make sure we pick the higher distribution when the lower one doesn't work.
    with fake_distribution_locator(farer_future_version, far_future_version) as paths:
      farer_path, far_path = paths
      results = ZincCompile._get_zinc_arguments(JvmPlatformSettings(
        source_level=farer_future_version,
        target_level=farer_future_version,
        args=['$JAVA_HOME/foo', '$JAVA_HOME'],
      ))
      self.assertEquals(farer_path, results[-1])
      self.assertEquals('{}/foo'.format(farer_path), results[-2])
Code Example #22
 def test_java_home_extraction_empty(self):
   result = tuple(ZincCompile._get_zinc_arguments(
     JvmPlatformSettings('1.7', '1.7', [])
   ))
   self.assertEquals(4, len(result),
                     msg='_get_zinc_arguments did not correctly handle empty args.')
Code Example #23
  def test_java_home_extraction_missing_distributions(self):
    # This will need to be bumped if java ever gets to major version one million.
    far_future_version = '999999.1'
    farer_future_version = '999999.2'

    os_name = normalize_os_name(get_os_name())

    @contextmanager
    def fake_distributions(versions):
      """Create a fake JDK for each java version in the input, and yield the list of java_homes.

      :param list versions: List of java version strings.
      """
      fakes = []
      for version in versions:
        fakes.append(distribution(
          executables=[EXE('bin/java', version), EXE('bin/javac', version)],
        ))
      yield [d.__enter__() for d in fakes]
      for d in fakes:
        d.__exit__(None, None, None)

    @contextmanager
    def fake_distribution_locator(*versions):
      """Sets up a fake distribution locator with fake distributions.

      Creates one distribution for each java version passed as an argument, and yields a list of
      paths to the java homes for each distribution.
      """
      with fake_distributions(versions) as paths:
        path_options = {
          'jvm-distributions': {
            'paths': {
              os_name: paths,
            }
          }
        }
        with subsystem_instance(DistributionLocator, **path_options) as locator:
          yield paths
          locator._reset()

    # Completely missing a usable distribution.
    with fake_distribution_locator(far_future_version):
      with self.assertRaises(DistributionLocator.Error):
        ZincCompile._get_zinc_arguments(JvmPlatformSettings(
          source_level=farer_future_version,
          target_level=farer_future_version,
          args=['$JAVA_HOME/foo'],
        ))

    # Missing a strict distribution.
    with fake_distribution_locator(farer_future_version) as paths:
      results = ZincCompile._get_zinc_arguments(JvmPlatformSettings(
        source_level=far_future_version,
        target_level=far_future_version,
        args=['$JAVA_HOME/foo', '$JAVA_HOME'],
      ))
      self.assertEquals(paths[0], results[-1])
      self.assertEquals('{}/foo'.format(paths[0]), results[-2])

    # Make sure we pick up the strictest possible distribution.
    with fake_distribution_locator(farer_future_version, far_future_version) as paths:
      farer_path, far_path = paths
      results = ZincCompile._get_zinc_arguments(JvmPlatformSettings(
        source_level=far_future_version,
        target_level=far_future_version,
        args=['$JAVA_HOME/foo', '$JAVA_HOME'],
      ))
      self.assertEquals(far_path, results[-1])
      self.assertEquals('{}/foo'.format(far_path), results[-2])

    # Make sure we pick the higher distribution when the lower one doesn't work.
    with fake_distribution_locator(farer_future_version, far_future_version) as paths:
      farer_path, far_path = paths
      results = ZincCompile._get_zinc_arguments(JvmPlatformSettings(
        source_level=farer_future_version,
        target_level=farer_future_version,
        args=['$JAVA_HOME/foo', '$JAVA_HOME'],
      ))
      self.assertEquals(farer_path, results[-1])
      self.assertEquals('{}/foo'.format(farer_path), results[-2])