예제 #1
0
    def test_get_product_target_mappings_for_targets_intransitive(self):
        """An intransitive lookup returns only the root target's own entries."""
        dep = self.make_target("b", JvmTarget, excludes=[Exclude("com.example", "lib")])
        root = self.make_target("a", JvmTarget, dependencies=[dep])

        classpath_product = ClasspathProducts(self.pants_workdir)
        jar_path = self._example_jar_path()
        resolved_jar = self.add_jar_classpath_element_for_path(classpath_product, root, jar_path)

        classpath_product.add_for_target(dep, [("default", self.path("b/loose/classes/dir"))])
        root_entries = [
            ("default", self.path("a/loose/classes/dir")),
            ("default", self.path("an/internally/generated.jar")),
        ]
        classpath_product.add_for_target(root, root_entries)

        # `dep`'s loose dir must not appear: only [root] is queried.
        artifact_entry = ArtifactClasspathEntry(
            jar_path, resolved_jar.coordinate, resolved_jar.cache_path
        )
        expected = [
            (("default", artifact_entry), root),
            (("default", ClasspathEntry(self.path("a/loose/classes/dir"))), root),
            (("default", ClasspathEntry(self.path("an/internally/generated.jar"))), root),
        ]
        self.assertEqual(
            expected, classpath_product.get_product_target_mappings_for_targets([root])
        )
예제 #2
0
  def test_create_canonical_classpath_with_common_prefix(self):
    """
    A special case when two targets' canonical classpath share a common prefix.

    Until we use `target.id` for canonical classpath, today's implementation is error-prone.
    This is such a regression test case added for a bug discovered in
    https://github.com/pantsbuild/pants/pull/2664

    TODO(peiyu) Remove once we fully migrate to use `target.id`.
    """
    # a's and c's canonical classpaths share a common prefix: a/b/b
    a = self.make_target('a/b', JvmTarget)
    c = self.make_target('a/b/b/c', JvmTarget)

    classpath_products = ClasspathProducts(self.pants_workdir)

    classpath_products.add_for_target(a, [('default', self._path('a.jar'))])
    classpath_products.add_for_target(c, [('default', self._path('c.jar'))])

    # target c first to verify its first created canonical classpath is preserved
    # (final positional flag False selects path-style, not target-id, naming)
    self._test_canonical_classpath_helper(classpath_products, [c, a],
                                          [
                                            'a/b/b/c/c/0-c.jar',
                                            'a/b/b/0-a.jar',
                                          ],
                                          {
                                            'a/b/b/classpath.txt':
                                              '{}/a.jar\n'.format(self.pants_workdir),
                                            'a/b/b/c/c/classpath.txt':
                                              '{}/c.jar\n'.format(self.pants_workdir),
                                          },
                                          False)
  def test_copy(self):
    """A copy shares existing state but later mutations do not leak either way."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    original = ClasspathProducts(self.pants_workdir)
    resolved_jar = self.add_jar_classpath_element_for_path(original,
                                                           a,
                                                           self._example_jar_path())
    original.add_for_target(a, [('default', self.path('a/path'))])

    copied = original.copy()
    closure = a.closure(bfs=True)

    both_entries = [('default', resolved_jar.pants_path), ('default', self.path('a/path'))]
    self.assertEqual(both_entries, original.get_for_targets(closure))
    self.assertEqual(both_entries, copied.get_for_targets(closure))

    # Excludes applied to the copy must not affect the original.
    self.add_excludes_for_targets(copied, b, a)
    self.assertEqual(both_entries, original.get_for_targets(closure))
    self.assertEqual([('default', self.path('a/path'))], copied.get_for_targets(closure))

    # Entries added to the copy must not appear in the original either.
    copied.add_for_target(b, [('default', self.path('b/path'))])
    self.assertEqual(both_entries, original.get_for_targets(closure))
    self.assertEqual([('default', self.path('a/path')), ('default', self.path('b/path'))],
                     copied.get_for_targets(closure))
예제 #4
0
  def test_single_classpath_element_no_excludes(self):
    """A single added path comes back unchanged for its conf."""
    target = self.make_target('a', JvmTarget)

    products = ClasspathProducts()
    jar_path = os.path.join(self.build_root, 'jar/path')
    products.add_for_target(target, [('default', jar_path)])

    self.assertEqual([('default', jar_path)], products.get_for_target(target))
  def test_fails_if_paths_outside_buildroot(self):
    """Adding an entry outside the working directory raises TaskError."""
    target = self.make_target('a', JvmTarget)

    products = ClasspathProducts(self.pants_workdir)
    with self.assertRaises(TaskError) as cm:
      products.add_for_target(target, [('default', '/dev/null')])

    expected_message = (
      'Classpath entry /dev/null for target a:a is located outside the '
      'working directory "{}".'.format(self.pants_workdir))
    self.assertEqual(expected_message, str(cm.exception))
예제 #6
0
  def test_excluded_classpath_element(self):
    """A target's own exclude filters its matching jar from the classpath."""
    target = self.make_target('a', JvmTarget, excludes=[Exclude('com.example', 'lib')])

    products = ClasspathProducts()
    jar_path = self._example_jar_path()
    products.add_for_target(target, [('default', jar_path)])
    products.add_excludes_for_targets([target])

    self.assertEqual([], products.get_for_target(target))
  def test_intransitive_dependencies_excluded_classpath_element(self):
    """An intransitive lookup does not apply a dependency's excludes."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts(self.pants_workdir)
    jar_path = self._example_jar_path()
    products.add_for_target(a, [('default', jar_path)])
    products.add_excludes_for_targets([a, b])

    # b's exclude would match the jar, but it is ignored for a single target.
    self.assertEqual([('default', jar_path)], products.get_for_target(a))
예제 #8
0
  def test_excludes_org_name(self):
    """An org-only exclude filters every jar from that org."""
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, excludes=[Exclude('com.example')], dependencies=[b])

    products = ClasspathProducts()
    products.add_for_target(b, [('default', self._example_jar_path())])
    products.add_excludes_for_targets([a])

    self.assertEqual([], products.get_for_target(a))
예제 #9
0
  def test_excludes_used_across_targets(self):
    """One target's exclude filters a jar contributed by a different target."""
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, excludes=[Exclude('com.example', 'lib')])

    products = ClasspathProducts()
    products.add_for_target(b, [('default', self._example_jar_path())])
    products.add_excludes_for_targets([a])

    self.assertEqual([], products.get_for_target(a))
예제 #10
0
  def test_jar_provided_exclude_with_similar_org(self):
    """A provided artifact with a merely similar org must not exclude the jar."""
    # 'com.example.lib' (org) is not the same coordinate as com.example/lib.
    provider = self.make_target('provider', ExportableJvmLibrary,
                                provides=Artifact('com.example.lib', '', Repository()))
    root = self.make_target('root', JvmTarget, dependencies=[provider])

    products = ClasspathProducts()
    products.add_for_target(root, [('default', self._example_jar_path())])
    products.add_excludes_for_targets([root, provider])

    self.assertEqual([('default', self._example_jar_path())],
                     products.get_for_target(root))
예제 #11
0
  def test_parent_excludes_ignored_for_resolving_child_target(self):
    """A parent's exclude does not filter entries looked up on the child."""
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, dependencies=[b], excludes=[Exclude('com.example', 'lib')])

    products = ClasspathProducts()
    jar_path = self._example_jar_path()
    products.add_for_target(b, [('default', jar_path)])
    products.add_excludes_for_targets([a])

    # Querying b directly: a's exclude is irrelevant.
    self.assertEqual([('default', jar_path)], products.get_for_target(b))
예제 #12
0
  def test_jar_provided_by_transitive_target_excluded(self):
    """A jar provided anywhere in the closure is excluded from the root."""
    provider = self.make_target('provider', ExportableJvmLibrary,
                                provides=Artifact('com.example', 'lib', Repository()))
    consumer = self.make_target('consumer', JvmTarget)
    root = self.make_target('root', JvmTarget, dependencies=[provider, consumer])

    products = ClasspathProducts()
    products.add_for_target(consumer, [('default', self._example_jar_path())])
    products.add_excludes_for_targets([root, provider, consumer])

    self.assertEqual([], products.get_for_target(root))
  def test_jar_in_classpath_not_a_resolved_jar_ignored_by_excludes(self):
    """Plain path entries are not subject to excludes, even jar-shaped ones."""
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, excludes=[Exclude('com.example')], dependencies=[b])

    jar_path = self._example_jar_path()

    products = ClasspathProducts(self.pants_workdir)
    # Added via add_for_target, not add_jars_for_targets, so no coordinate
    # is attached and the exclude cannot match it.
    products.add_for_target(b, [('default', jar_path)])
    self.add_excludes_for_targets(products, a)

    self.assertEqual([('default', jar_path)],
                     products.get_for_targets(a.closure(bfs=True)))
예제 #14
0
  def test_jar_provided_exclude_with_similar_name(self):
    # note exclude 'jars/com.example/li' should not match jars/com.example/lib/jars/123.4.jar
    # (the provided name 'li' is only a prefix of 'lib', not an exact match)
    provider = self.make_target('provider', ExportableJvmLibrary,
                         provides=Artifact('com.example', 'li', Repository()))
    root = self.make_target('root', JvmTarget, dependencies=[provider])

    classpath_product = ClasspathProducts()
    classpath_product.add_for_target(root, [('default', self._example_jar_path())])
    classpath_product.add_excludes_for_targets([root, provider])

    classpath = classpath_product.get_for_target(root)

    # The jar survives: no exact coordinate matched the provided artifact.
    self.assertEqual([('default', self._example_jar_path())], classpath)
예제 #15
0
  def test_path_with_overlapped_conf_added(self):
    """compute_classpath keeps an entry whose conf matches any requested conf."""
    target = self.make_target('a', JvmTarget)

    products = ClasspathProducts(self.pants_workdir)
    jar_path = os.path.join(self.pants_workdir, 'jar/path')
    products.add_for_target(target, [('default', jar_path)])

    classpath = ClasspathUtil.compute_classpath(
      [target], products, extra_classpath_tuples=[], confs=['not-default', 'default'])

    self.assertEqual([jar_path], classpath)
예제 #16
0
  def test_exclude_leaves_other_jars_unaffected(self):
    """Excluding one coordinate leaves jars from other orgs in place."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts()
    excluded_jar = self._example_jar_path()
    kept_jar = os.path.join(self.build_root, 'ivy/jars/org.example/lib/123.4.jar')
    products.add_for_target(a, [('default', excluded_jar), ('default', kept_jar)])
    products.add_excludes_for_targets([b])

    self.assertEqual([('default', kept_jar)], products.get_for_target(a))
예제 #17
0
  def test_extra_path_added(self):
    """Extra classpath tuples are appended after the target's own entries."""
    target = self.make_target('a', JvmTarget)

    products = ClasspathProducts(self.pants_workdir)
    jar_path = os.path.join(self.pants_workdir, 'jar/path')
    products.add_for_target(target, [('default', jar_path)])

    extra_path = 'new-path'
    classpath = ClasspathUtil.compute_classpath(
      [target], products,
      extra_classpath_tuples=[('default', extra_path)],
      confs=['default'])

    self.assertEqual([jar_path, extra_path], classpath)
  def test_get_artifact_classpath_entries_for_targets(self):
    """Only resolved-artifact entries are returned; plain paths are dropped."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts(self.pants_workdir)
    jar_path = self._example_jar_path()
    resolved_jar = self.add_jar_classpath_element_for_path(products, a, jar_path)

    # These non-artifact classpath entries should be ignored.
    products.add_for_target(b, [('default', self.path('b/loose/classes/dir'))])
    products.add_for_target(a, [('default', self.path('a/loose/classes/dir')),
                                ('default', self.path('an/internally/generated.jar'))])

    expected_entry = ArtifactClasspathEntry(jar_path,
                                            resolved_jar.coordinate,
                                            resolved_jar.cache_path)
    self.assertEqual([('default', expected_entry)],
                     products.get_artifact_classpath_entries_for_targets([a]))
예제 #19
0
  def test_create_canonical_classpath(self):
    # Two entries for one target become 0-/1- prefixed files under the
    # target-id directory `a.b.b`, plus a classpath.txt manifest.
    a = self.make_target('a/b', JvmTarget)

    classpath_products = ClasspathProducts(self.pants_workdir)

    classpath_products.add_for_target(a, [('default', self._path('a.jar')),
                                          ('default', self._path('resources'))])

    # Final positional flag True selects target-id style naming — compare the
    # False case elsewhere in this suite, which uses path-style directories.
    self._test_canonical_classpath_helper(classpath_products, [a],
                                          [
                                            'a.b.b/0-a.jar',
                                            'a.b.b/1-resources'
                                          ],
                                          {
                                            'a.b.b/classpath.txt':
                                              '{}/a.jar:{}/resources\n'.format(self.pants_workdir,
                                                                               self.pants_workdir)
                                           },
                                          True)
  def test_get_internal_classpath_entries_for_targets(self):
    """Only internal (non-artifact) entries come back, for the full closure."""
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts(self.pants_workdir)

    # This artifact classpath entry should be ignored.
    self.add_jar_classpath_element_for_path(products, a, self._example_jar_path())

    products.add_for_target(b, [('default', self.path('b/loose/classes/dir'))])
    products.add_for_target(a, [('default', self.path('a/loose/classes/dir')),
                                ('default', self.path('an/internally/generated.jar'))])

    expected = [('default', ClasspathEntry(self.path(p)))
                for p in ('a/loose/classes/dir',
                          'an/internally/generated.jar',
                          'b/loose/classes/dir')]
    self.assertEqual(expected,
                     products.get_internal_classpath_entries_for_targets(a.closure(bfs=True)))
예제 #21
0
  def test_get_internal_classpath_entries_for_targets(self):
    """Internal entries for the closure exclude resolved artifacts."""
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts(self.pants_workdir)

    # This artifact classpath entry should be ignored.
    jar_path = self._example_jar_path()
    self.add_jar_classpath_element_for_path(products, a, jar_path)

    products.add_for_target(b, [('default', self.path('b/loose/classes/dir'))])
    products.add_for_target(a, [('default', self.path('a/loose/classes/dir')),
                                ('default', self.path('an/internally/generated.jar'))])

    result = products.get_internal_classpath_entries_for_targets(a.closure(bfs=True))
    self.assertEqual([('default', ClasspathEntry(self.path('a/loose/classes/dir'))),
                      ('default', ClasspathEntry(self.path('an/internally/generated.jar'))),
                      ('default', ClasspathEntry(self.path('b/loose/classes/dir')))],
                     result)
예제 #22
0
  def test_get_classpath_entries_for_targets_intransitive(self):
    """transitive=False limits entries to the queried target only."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts()
    jar_path = self._example_jar_path()
    resolved_jar = self.add_jar_classpath_element_for_path(products, a, jar_path)

    products.add_for_target(b, [('default', self.path('b/loose/classes/dir'))])
    products.add_for_target(a, [('default', self.path('a/loose/classes/dir')),
                                ('default', self.path('an/internally/generated.jar'))])

    artifact_entry = ArtifactClasspathEntry(jar_path, resolved_jar.coordinate,
                                            resolved_jar.cache_path)
    self.assertEqual(
      [('default', artifact_entry),
       ('default', ClasspathEntry(self.path('a/loose/classes/dir'))),
       ('default', ClasspathEntry(self.path('an/internally/generated.jar')))],
      products.get_classpath_entries_for_targets([a], transitive=False))
예제 #23
0
    def test_get_product_target_mappings_for_targets_intransitive(self):
        """Intransitive mapping returns only entries owned by the root target."""
        b = self.make_target("b", JvmTarget,
                             excludes=[Exclude("com.example", "lib")])
        a = self.make_target("a", JvmTarget, dependencies=[b])

        products = ClasspathProducts(self.pants_workdir)
        jar_path = self._example_jar_path()
        resolved_jar = self.add_jar_classpath_element_for_path(
            products, a, jar_path)

        products.add_for_target(
            b, [("default", self.path("b/loose/classes/dir"))])
        products.add_for_target(
            a, [("default", self.path("a/loose/classes/dir")),
                ("default", self.path("an/internally/generated.jar"))])

        # b's loose dir is absent: only [a] is queried.
        artifact = ArtifactClasspathEntry(jar_path,
                                          resolved_jar.coordinate,
                                          resolved_jar.cache_path)
        expected = [
            (("default", artifact), a),
            (("default", ClasspathEntry(self.path("a/loose/classes/dir"))), a),
            (("default",
              ClasspathEntry(self.path("an/internally/generated.jar"))), a),
        ]
        self.assertEqual(
            expected, products.get_product_target_mappings_for_targets([a]))
  def test_get_product_target_mappings_for_targets_transitive(self):
    """A transitive lookup maps each entry back to its owning target."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts(self.pants_workdir)
    jar_path = self._example_jar_path()
    resolved_jar = self.add_jar_classpath_element_for_path(products, a, jar_path)

    products.add_for_target(b, [('default', self.path('b/loose/classes/dir'))])
    products.add_for_target(a, [('default', self.path('a/loose/classes/dir')),
                                ('default', self.path('an/internally/generated.jar'))])

    artifact = ArtifactClasspathEntry(jar_path, resolved_jar.coordinate,
                                      resolved_jar.cache_path)
    expected = [
      (('default', artifact), a),
      (('default', ClasspathEntry(self.path('a/loose/classes/dir'))), a),
      (('default', ClasspathEntry(self.path('an/internally/generated.jar'))), a),
      (('default', ClasspathEntry(self.path('b/loose/classes/dir'))), b),
    ]
    self.assertEqual(expected,
                     products.get_product_target_mappings_for_targets(a.closure(bfs=True)))
예제 #25
0
  def test_get_product_target_mappings_for_targets_transitive(self):
    """Each classpath entry in the closure is paired with its owning target."""
    b = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
    a = self.make_target('a', JvmTarget, dependencies=[b])

    products = ClasspathProducts(self.pants_workdir)
    jar_path = self._example_jar_path()
    resolved_jar = self.add_jar_classpath_element_for_path(products, a, jar_path)

    products.add_for_target(b, [('default', self.path('b/loose/classes/dir'))])
    products.add_for_target(a, [('default', self.path('a/loose/classes/dir')),
                                ('default', self.path('an/internally/generated.jar'))])

    mappings = products.get_product_target_mappings_for_targets(a.closure(bfs=True))
    artifact = ArtifactClasspathEntry(jar_path, resolved_jar.coordinate,
                                      resolved_jar.cache_path)
    internal = [(('default', ClasspathEntry(self.path(p))), owner)
                for p, owner in (('a/loose/classes/dir', a),
                                 ('an/internally/generated.jar', a),
                                 ('b/loose/classes/dir', b))]
    self.assertEqual([(('default', artifact), a)] + internal, mappings)
예제 #26
0
  def test_classpath_by_targets(self):
    """classpath_by_targets groups surviving entries by their owning target.

    Entries are dropped either because their conf is not requested
    ((a, path2)) or because an exclude matches their coordinate ((b, path3)).
    """
    b = self.make_target('b', JvmTarget)
    a = self.make_target('a', JvmTarget, dependencies=[b],
                         excludes=[Exclude('com.example', 'lib')])

    classpath_products = ClasspathProducts(self.pants_workdir)

    path1 = self._path('jar/path1')
    path2 = self._path('jar/path2')
    path3 = os.path.join(self.pants_workdir, 'jar/path3')
    resolved_jar = ResolvedJar(M2Coordinate(org='com.example', name='lib', rev='1.0'),
                               cache_path='somewhere',
                               pants_path=path3)
    classpath_products.add_for_target(a, [('default', path1)])
    classpath_products.add_for_target(a, [('non-default', path2)])
    classpath_products.add_for_target(b, [('default', path2)])
    classpath_products.add_jars_for_targets([b], 'default', [resolved_jar])
    classpath_products.add_excludes_for_targets([a])

    # (a, path2) filtered because of conf
    # (b, path3) filtered because of excludes
    # Use assertEqual: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(OrderedDict([(a, [ClasspathEntry(path1)]),
                                  (b, [ClasspathEntry(path2)])]),
                     ClasspathUtil.classpath_by_targets(a.closure(bfs=True),
                                                        classpath_products))
예제 #27
0
    def test_classpath_by_targets(self):
        """classpath_by_targets groups surviving entries by owning target."""
        b = self.make_target("b", JvmTarget)
        a = self.make_target("a", JvmTarget, dependencies=[b],
                             excludes=[Exclude("com.example", "lib")])

        products = ClasspathProducts(self.pants_workdir)

        path1 = self._path("jar/path1")
        path2 = self._path("jar/path2")
        path3 = os.path.join(self.pants_workdir, "jar/path3")
        resolved_jar = ResolvedJar(
            M2Coordinate(org="com.example", name="lib", rev="1.0"),
            cache_path="somewhere",
            pants_path=path3)
        products.add_for_target(a, [("default", path1)])
        products.add_for_target(a, [("non-default", path2)])
        products.add_for_target(b, [("default", path2)])
        products.add_jars_for_targets([b], "default", [resolved_jar])
        products.add_excludes_for_targets([a])

        # (a, path2) filtered because of conf
        # (b, path3) filtered because of excludes
        expected = OrderedDict([(a, [ClasspathEntry(path1)]),
                                (b, [ClasspathEntry(path2)])])
        self.assertEqual(
            expected,
            ClasspathUtil.classpath_by_targets(a.closure(bfs=True), products))
예제 #28
0
    def test_classpath_by_targets(self):
        """Grouping by target drops conf-mismatched and excluded entries."""
        dep = self.make_target('b', JvmTarget)
        root = self.make_target('a', JvmTarget, dependencies=[dep],
                                excludes=[Exclude('com.example', 'lib')])

        products = ClasspathProducts(self.pants_workdir)

        kept_path = self._path('jar/path1')
        shared_path = self._path('jar/path2')
        jar_pants_path = os.path.join(self.pants_workdir, 'jar/path3')
        resolved_jar = ResolvedJar(M2Coordinate(org='com.example',
                                                name='lib',
                                                rev='1.0'),
                                   cache_path='somewhere',
                                   pants_path=jar_pants_path)
        products.add_for_target(root, [('default', kept_path)])
        products.add_for_target(root, [('non-default', shared_path)])
        products.add_for_target(dep, [('default', shared_path)])
        products.add_jars_for_targets([dep], 'default', [resolved_jar])
        products.add_excludes_for_targets([root])

        # (a, path2) filtered because of conf
        # (b, path3) filtered because of excludes
        self.assertEqual(
            OrderedDict([(root, [ClasspathEntry(kept_path)]),
                         (dep, [ClasspathEntry(shared_path)])]),
            ClasspathUtil.classpath_by_targets(root.closure(bfs=True),
                                               products))
예제 #29
0
    def test_create_canonical_classpath(self):
        # Verifies canonical classpath creation with a resolved-jar exclude,
        # then exercises two incremental rebuilds against the same base_dir.
        a = self.make_target('a/b', JvmTarget)

        jar_path = 'ivy/jars/org.x/lib/x-1.0.jar'
        jar_path_excluded = 'ivy/jars/org.y/lib/y-1.0.jar'
        classpath_products = ClasspathProducts(self.pants_workdir)

        resolved_jar = ResolvedJar(M2Coordinate(org='org', name='x',
                                                rev='1.0'),
                                   cache_path='somewhere',
                                   pants_path=self._path(jar_path))
        # org.y should be excluded from result canonical path
        resolved_jar_to_exclude = ResolvedJar(
            M2Coordinate(org='org', name='y', rev='1.0'),
            cache_path='somewhere',
            pants_path=self._path(jar_path_excluded))

        classpath_products.add_for_target(
            a, [('default', self._path('a.jar')),
                ('default', self._path('resources'))])
        classpath_products.add_jars_for_targets([a], 'default', [resolved_jar])

        # Expect entries 0..2 (a.jar, resources, x-1.0.jar); the y jar never
        # appears since only resolved_jar was added and org.y is excluded.
        with temporary_dir() as base_dir:
            self._test_canonical_classpath_helper(
                classpath_products, [a],
                base_dir,
                True, [
                    'a.b.b-0.jar',
                    'a.b.b-1',
                    'a.b.b-2.jar',
                ], {
                    'a.b.b-classpath.txt':
                    '{}/a.jar:{}/resources:{}/{}\n'.format(
                        self.pants_workdir, self.pants_workdir,
                        self.pants_workdir, jar_path)
                },
                excludes=set([Exclude(org='org', name='y')]))

        # incrementally delete the resource dependency
        classpath_products = ClasspathProducts(self.pants_workdir)
        classpath_products.add_for_target(a,
                                          [('default', self._path('a.jar'))])
        self._test_canonical_classpath_helper(
            classpath_products, [a], base_dir, True, [
                'a.b.b-0.jar',
            ],
            {'a.b.b-classpath.txt': '{}/a.jar\n'.format(self.pants_workdir)})

        # incrementally add another jar dependency
        classpath_products = ClasspathProducts(self.pants_workdir)
        classpath_products.add_for_target(a,
                                          [('default', self._path('a.jar')),
                                           ('default', self._path('b.jar'))])
        self._test_canonical_classpath_helper(
            classpath_products, [a], base_dir, True,
            ['a.b.b-0.jar', 'a.b.b-1.jar'], {
                'a.b.b-classpath.txt':
                '{}/a.jar:{}/b.jar\n'.format(self.pants_workdir,
                                             self.pants_workdir)
            })
예제 #30
0
    def test_copy(self):
        """A copied product shares current entries but mutates independently."""
        b = self.make_target('b', JvmTarget,
                             excludes=[Exclude('com.example', 'lib')])
        a = self.make_target('a', JvmTarget, dependencies=[b])

        original = ClasspathProducts(self.pants_workdir)
        resolved_jar = self.add_jar_classpath_element_for_path(
            original, a, self._example_jar_path())
        original.add_for_target(a, [('default', self.path('a/path'))])

        copied = original.copy()
        closure = a.closure(bfs=True)

        both = [('default', resolved_jar.pants_path),
                ('default', self.path('a/path'))]
        self.assertEqual(both, original.get_for_targets(closure))
        self.assertEqual(both, copied.get_for_targets(closure))

        # Excludes applied to the copy leave the original untouched.
        self.add_excludes_for_targets(copied, b, a)
        self.assertEqual(both, original.get_for_targets(closure))
        self.assertEqual([('default', self.path('a/path'))],
                         copied.get_for_targets(closure))

        # Entries added to the copy likewise do not appear in the original.
        copied.add_for_target(b, [('default', self.path('b/path'))])
        self.assertEqual(both, original.get_for_targets(closure))
        self.assertEqual([('default', self.path('a/path')),
                          ('default', self.path('b/path'))],
                         copied.get_for_targets(closure))
예제 #31
0
    def test_get_artifact_classpath_entries_for_targets(self):
        """Only resolved-artifact entries come back; loose paths are dropped."""
        b = self.make_target('b', JvmTarget,
                             excludes=[Exclude('com.example', 'lib')])
        a = self.make_target('a', JvmTarget, dependencies=[b])

        products = ClasspathProducts(self.pants_workdir)
        jar_path = self._example_jar_path()
        resolved_jar = self.add_jar_classpath_element_for_path(
            products, a, jar_path)

        # These non-artifact classpath entries should be ignored.
        products.add_for_target(
            b, [('default', self.path('b/loose/classes/dir'))])
        products.add_for_target(
            a, [('default', self.path('a/loose/classes/dir')),
                ('default', self.path('an/internally/generated.jar'))])

        expected = [('default',
                     ArtifactClasspathEntry(jar_path,
                                            resolved_jar.coordinate,
                                            resolved_jar.cache_path))]
        self.assertEqual(
            expected, products.get_artifact_classpath_entries_for_targets([a]))
예제 #32
0
  def test_create_canonical_classpath(self):
    """Canonical classpath symlinks and the -classpath.txt manifest track
    incremental additions and deletions of a target's classpath entries."""
    a = self.make_target('a/b', JvmTarget)

    classpath_products = ClasspathProducts(self.pants_workdir)

    classpath_products.add_for_target(a, [('default', self._path('a.jar')),
                                          ('default', self._path('resources'))])

    with temporary_dir() as base_dir:
      # Initial state: one jar entry (index 0) and one directory entry (index 1).
      self._test_canonical_classpath_helper(classpath_products, [a],
                                            base_dir, True,
                                            [
                                              'a.b.b-0.jar',
                                              'a.b.b-1'
                                            ],
                                            {
                                              'a.b.b-classpath.txt':
                                              '{}/a.jar:{}/resources\n'.format(self.pants_workdir,
                                                                               self.pants_workdir)
                                            })

    # incrementally delete the resource dependency
    # NOTE(review): base_dir is reused after temporary_dir() has cleaned it up;
    # presumably the helper recreates it — confirm against the helper's impl.
    classpath_products = ClasspathProducts(self.pants_workdir)
    classpath_products.add_for_target(a, [('default', self._path('a.jar'))])
    self._test_canonical_classpath_helper(classpath_products, [a],
                                          base_dir, True,
                                          [
                                            'a.b.b-0.jar',
                                          ],
                                          {
                                            'a.b.b-classpath.txt':
                                            '{}/a.jar\n'.format(self.pants_workdir)
                                          })

    # incrementally add another jar dependency
    classpath_products = ClasspathProducts(self.pants_workdir)
    classpath_products.add_for_target(a, [('default', self._path('a.jar')),
                                          ('default', self._path('b.jar'))])
    self._test_canonical_classpath_helper(classpath_products, [a],
                                          base_dir, True,
                                          [
                                            'a.b.b-0.jar',
                                            'a.b.b-1.jar'
                                          ],
                                          {
                                            'a.b.b-classpath.txt':
                                            '{}/a.jar:{}/b.jar\n'.format(self.pants_workdir,
                                                                         self.pants_workdir)
                                          })
예제 #33
0
    def test_create_canonical_classpath(self):
        """Canonical classpath handles file entries, a directory entry and a
        resolved jar together, and tracks incremental changes across runs."""
        a = self.make_target("a/b", JvmTarget)

        jar_path = "ivy/jars/org.x/lib/x-1.0.jar"
        classpath_products = ClasspathProducts(self.pants_workdir)

        resolved_jar = ResolvedJar(
            M2Coordinate(org="org", name="x", rev="1.0"),
            cache_path="somewhere",
            pants_path=self._create_file(jar_path),
        )

        classpath_products.add_for_target(
            a,
            [("default", self._create_file("a.jar")),
             ("default", self._create_file("resources"))],
        )
        classpath_products.add_jars_for_targets([a], "default", [resolved_jar])

        with temporary_dir() as base_dir:
            # Initial state: jar (0), directory (1), resolved jar (2).
            self._test_canonical_classpath_helper(
                classpath_products,
                [a],
                base_dir,
                ["a.b.b-0.jar", "a.b.b-1", "a.b.b-2.jar"],
                {
                    "a.b.b-classpath.txt":
                    "{}/a.jar:{}/resources:{}/{}\n".format(
                        self.pants_workdir, self.pants_workdir,
                        self.pants_workdir, jar_path)
                },
                excludes={Exclude(org="org", name="y")},
            )

        # incrementally delete the resource dependency
        # NOTE(review): base_dir is reused after temporary_dir() has cleaned it
        # up; presumably the helper recreates it — confirm against the helper.
        classpath_products = ClasspathProducts(self.pants_workdir)
        classpath_products.add_for_target(
            a, [("default", self._create_file("a.jar"))])
        self._test_canonical_classpath_helper(
            classpath_products,
            [a],
            base_dir,
            ["a.b.b-0.jar"],
            {"a.b.b-classpath.txt": f"{self.pants_workdir}/a.jar\n"},
        )

        # incrementally add another jar dependency
        classpath_products = ClasspathProducts(self.pants_workdir)
        classpath_products.add_for_target(
            a, [("default", self._create_file("a.jar")),
                ("default", self._create_file("b.jar"))])
        self._test_canonical_classpath_helper(
            classpath_products,
            [a],
            base_dir,
            ["a.b.b-0.jar", "a.b.b-1.jar"],
            {
                "a.b.b-classpath.txt":
                "{}/a.jar:{}/b.jar\n".format(self.pants_workdir,
                                             self.pants_workdir)
            },
        )
예제 #34
0
class RscCompile(ZincCompile):
  """Compile Scala and Java code to classfiles using Rsc."""

  _name = 'rsc' # noqa
  compiler_name = 'rsc'

  def __init__(self, *args, **kwargs):
    super(RscCompile, self).__init__(*args, **kwargs)
    # Separate product tracking the semanticdb jars that metacp derives from
    # 3rdparty jar dependencies (populated in work_for_vts_rsc_jar_library).
    self._metacp_jars_classpath_product = ClasspathProducts(self.get_options().pants_workdir)

  @classmethod
  def implementation_version(cls):
    # Bump the version number to invalidate previously cached compile results.
    return super(RscCompile, cls).implementation_version() + [('RscCompile', 170)]

  @classmethod
  def register_options(cls, register):
    """Register the rsc/metacp/metai/mjar JVM tools this task shells out to."""
    super(RscCompile, cls).register_options(register)

    rsc_toolchain_version = '0.0.0-294-d7114447'
    scalameta_toolchain_version = '4.0.0-M10'

    # TODO: it would be better to have a less adhoc approach to handling
    #       optional dependencies. See: https://github.com/pantsbuild/pants/issues/6390
    # Extra jars metacp needs on its --dependency-classpath so it can resolve
    # symbols referenced by the scalac jars; everything is excluded from shading.
    cls.register_jvm_tool(
      register,
      'workaround-metacp-dependency-classpath',
      classpath=[
        JarDependency(org = 'org.scala-lang', name = 'scala-compiler', rev = '2.11.12'),
        JarDependency(org = 'org.scala-lang', name = 'scala-library', rev = '2.11.12'),
        JarDependency(org = 'org.scala-lang', name = 'scala-reflect', rev = '2.11.12'),
        JarDependency(org = 'org.scala-lang.modules', name = 'scala-partest_2.11', rev = '1.0.18'),
        JarDependency(org = 'jline', name = 'jline', rev = '2.14.6'),
        JarDependency(org = 'org.apache.commons', name = 'commons-lang3', rev = '3.3.2'),
        JarDependency(org = 'org.apache.ant', name = 'ant', rev = '1.8.2'),
        JarDependency(org = 'org.pegdown', name = 'pegdown', rev = '1.4.2'),
        JarDependency(org = 'org.testng', name = 'testng', rev = '6.8.7'),
        JarDependency(org = 'org.scalacheck', name = 'scalacheck_2.11', rev = '1.13.1'),
        JarDependency(org = 'org.jmock', name = 'jmock-legacy', rev = '2.5.1'),
        JarDependency(org = 'org.easymock', name = 'easymockclassextension', rev = '3.1'),
        JarDependency(org = 'org.seleniumhq.selenium', name = 'selenium-java', rev = '2.35.0'),
      ],
      custom_rules=[
        Shader.exclude_package('*', recursive=True),]
    )
    # rsc: outlines scala sources.
    cls.register_jvm_tool(
      register,
      'rsc',
      classpath=[
          JarDependency(
              org='com.twitter',
              name='rsc_2.11',
              rev=rsc_toolchain_version,
          ),
      ],
      custom_rules=[
        Shader.exclude_package('rsc', recursive=True),
      ])
    # mjar: packages semanticdb output into an mjar.
    cls.register_jvm_tool(
      register,
      'mjar',
      classpath=[
          JarDependency(
              org='com.twitter',
              name='mjar_2.11',
              rev=rsc_toolchain_version,
          ),
      ],
      custom_rules=[
        Shader.exclude_package('scala', recursive=True),
      ])
    # metacp: converts a classpath of jars into semanticdb form.
    cls.register_jvm_tool(
      register,
      'metacp',
      classpath=[
          JarDependency(
            org='org.scalameta',
            name='metacp_2.11',
            rev=scalameta_toolchain_version,
          ),
      ],
      custom_rules=[
        Shader.exclude_package('scala', recursive=True),
      ])
    # metai: indexes directories of semanticdbs produced by metacp.
    cls.register_jvm_tool(
      register,
      'metai',
      classpath=[
          JarDependency(
            org='org.scalameta',
            name='metai_2.11',
            rev=scalameta_toolchain_version,
          ),
      ],
      custom_rules=[
        Shader.exclude_package('scala', recursive=True),
      ])

  def register_extra_products_from_contexts(self, targets, compile_contexts):
    """Publish each target's rsc-visible output to the 'rsc_classpath' product.

    Java-only targets expose their zinc jar, scala targets expose the rsc mjar,
    and jar libraries expose every file found under their metacp results dir.
    """
    super(RscCompile, self).register_extra_products_from_contexts(targets, compile_contexts)
    # TODO when digests are added, if the target is valid,
    # the digest should be loaded in from the cc somehow.
    # See: #6504
    for target in targets:
      rsc_cc, compile_cc = compile_contexts[target]
      if self._only_zinc_compileable(target):
        self.context.products.get_data('rsc_classpath').add_for_target(
          compile_cc.target,
          [(conf, compile_cc.jar_file) for conf in self._confs])
      elif self._rsc_compilable(target):
        self.context.products.get_data('rsc_classpath').add_for_target(
          rsc_cc.target,
          [(conf, rsc_cc.rsc_mjar_file) for conf in self._confs])
      elif self._metacpable(target):
        # Walk the metacp results dir and add classpath entries for all the files there.
        # TODO exercise this with a test.
        for root, dirs, files in os.walk(rsc_cc.rsc_index_dir):
          self.context.products.get_data('rsc_classpath').add_for_target(
            rsc_cc.target,
            [(conf, os.path.join(root, f)) for conf in self._confs for f in files]
          )
      else:
        # Targets not selected by this task (see select()) are skipped silently.
        pass

  def _metacpable(self, target):
    """True for 3rdparty jar libraries, which are processed with metacp."""
    return isinstance(target, JarLibrary)

  def _rsc_compilable(self, target):
    """True for targets with scala sources, which rsc can outline."""
    return target.has_sources('.scala')

  def _only_zinc_compileable(self, target):
    """True for targets with java sources, which only zinc can compile."""
    return target.has_sources('.java')

  def create_empty_extra_products(self):
    """Ensure the 'rsc_classpath' product exists, seeded from 'compile_classpath'."""
    super(RscCompile, self).create_empty_extra_products()

    compile_classpath = self.context.products.get_data('compile_classpath')
    classpath_product = self.context.products.get_data('rsc_classpath')
    if not classpath_product:
      # get_data with an init function both creates and registers the product.
      self.context.products.get_data('rsc_classpath', compile_classpath.copy)
    else:
      classpath_product.update(compile_classpath)

  def select(self, target):
    """Return True if this task should process ``target``.

    Jar libraries (metacp-able) are always selected. Otherwise we require the
    target to be marked for JVM compilation, to differentiate from targets
    owned by the scalajs contrib module, and to have java or scala sources.
    """
    if self._metacpable(target):
      return True
    is_jvm = isinstance(target, JvmTarget)
    return is_jvm and (self._only_zinc_compileable(target) or self._rsc_compilable(target))

  def _rsc_key_for_target(self, compile_target):
    """Return the job key that produces this target's rsc-visible output.

    Raises:
      TaskError: if the target is none of java-, rsc- or metacp-compilable.
    """
    if self._only_zinc_compileable(compile_target):
      # rsc outlining with java dependencies depend on the java's zinc compile
      return self._compile_against_rsc_key_for_target(compile_target)
    elif self._rsc_compilable(compile_target):
      return "rsc({})".format(compile_target.address.spec)
    elif self._metacpable(compile_target):
      return "metacp({})".format(compile_target.address.spec)
    else:
      raise TaskError('unexpected target for compiling with rsc .... {}'.format(compile_target))

  def _compile_against_rsc_key_for_target(self, compile_target):
    # Key for the zinc compile job that consumes rsc outputs of dependencies.
    return "compile_against_rsc({})".format(compile_target.address.spec)

  def create_compile_jobs(self,
                          compile_target,
                          compile_contexts,
                          invalid_dependencies,
                          ivts,
                          counter,
                          runtime_classpath_product):
    """Create the rsc and zinc Jobs for one invalidated target.

    Scala targets get an rsc outlining job plus a zinc compile against rsc
    outputs; java-only targets get a zinc job whose products are registered to
    both the runtime and rsc classpaths; jar libraries get a metacp job.
    Returns the combined list rsc_jobs + zinc_jobs.
    """

    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target

      if not hit_cache:
        cp_entries = []

        # Include the current machine's jdk lib jars. This'll blow up remotely.
        # We need a solution for that.
        # Probably something to do with https://github.com/pantsbuild/pants/pull/6346
        distribution = JvmPlatform.preferred_jvm_distribution([ctx.target.platform], strict=True)
        jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
        cp_entries.extend(jvm_lib_jars_abs)

        classpath_abs = self._zinc.compile_classpath(
          'rsc_classpath',
          ctx.target,
          extra_cp_entries=self._extra_compile_time_classpath)

        jar_deps = [t for t in DependencyContext.global_instance().dependencies_respecting_strict_deps(target)
                    if isinstance(t, JarLibrary)]
        metacp_jar_classpath_abs = [y[1] for y in self._metacp_jars_classpath_product.get_for_targets(
          jar_deps
        )]
        jar_jar_paths = {y[1] for y in self.context.products.get_data('rsc_classpath').get_for_targets(jar_deps)}

        # Jar deps are fed to rsc via their metacp'd form, so drop the raw jars.
        classpath_abs = [c for c in classpath_abs if c not in jar_jar_paths]


        classpath_rel = fast_relpath_collection(classpath_abs)
        metacp_jar_classpath_rel = fast_relpath_collection(metacp_jar_classpath_abs)
        cp_entries.extend(classpath_rel)

        ctx.ensure_output_dirs_exist()

        # NB: str.rjust requires a text fillchar on python 3 — b' ' raises TypeError.
        counter_val = str(counter()).rjust(counter.format_length(), ' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        tgt, = vts.targets
        with Timer() as timer:
          # Step 1: Convert classpath to SemanticDB
          # ---------------------------------------
          scalac_classpath_path_entries_abs = self.tool_classpath('workaround-metacp-dependency-classpath')
          scalac_classpath_path_entries = fast_relpath_collection(scalac_classpath_path_entries_abs)
          rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
          args = [
            '--verbose',
            # NB: Without this setting, rsc will be missing some symbols
            #     from the scala library.
            '--include-scala-library-synthetics', # TODO generate these once and cache them
            # NB: We need to add these extra dependencies in order to be able
            #     to find symbols used by the scalac jars.
            '--dependency-classpath', os.pathsep.join(scalac_classpath_path_entries + list(jar_jar_paths)),
            # NB: The directory to dump the semanticdb jars generated by metacp.
            '--out', rsc_index_dir,
            os.pathsep.join(cp_entries),
          ]
          metacp_wu = self._runtool(
            'scala.meta.cli.Metacp',
            'metacp',
            args,
            distribution,
            tgt=tgt,
            input_files=(scalac_classpath_path_entries + classpath_rel),
            output_dir=rsc_index_dir)
          metacp_stdout = stdout_contents(metacp_wu)
          metacp_result = json.loads(metacp_stdout)


          metai_classpath = self._collect_metai_classpath(
            metacp_result, classpath_rel, jvm_lib_jars_abs)

          # Step 1.5: metai Index the semanticdbs
          # -------------------------------------
          self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

          # Step 2: Outline Scala sources into SemanticDB
          # ---------------------------------------------
          rsc_outline_dir = fast_relpath(ctx.rsc_outline_dir, get_buildroot())
          rsc_out = os.path.join(rsc_outline_dir, 'META-INF/semanticdb/out.semanticdb')
          safe_mkdir(os.path.join(rsc_outline_dir, 'META-INF/semanticdb'))
          target_sources = ctx.sources
          args = [
            '-cp', os.pathsep.join(metai_classpath + metacp_jar_classpath_rel),
            '-out', rsc_out,
          ] + target_sources
          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            # TODO pass the input files from the target snapshot instead of the below
            # input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
            input_files=target_sources + metai_classpath + metacp_jar_classpath_rel,
            output_dir=rsc_outline_dir)
          rsc_classpath = [rsc_outline_dir]

          # Step 2.5: Postprocess the rsc outputs
          # TODO: This is only necessary as a workaround for https://github.com/twitter/rsc/issues/199.
          # Ideally, Rsc would do this on its own.
          self._run_metai_tool(distribution,
            rsc_classpath,
            rsc_outline_dir,
            tgt,
            extra_input_files=(rsc_out,))


          # Step 3: Convert SemanticDB into an mjar
          # ---------------------------------------
          rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file, get_buildroot())
          args = [
            '-out', rsc_mjar_file,
            os.pathsep.join(rsc_classpath),
          ]
          self._runtool(
            'scala.meta.cli.Mjar',
            'mjar',
            args,
            distribution,
            tgt=tgt,
            input_files=(
              rsc_out,
            ),
            output_dir=os.path.dirname(rsc_mjar_file)
            )
          self.context.products.get_data('rsc_classpath').add_for_target(
            ctx.target,
            [(conf, ctx.rsc_mjar_file) for conf in self._confs],
          )

        self._record_target_stats(tgt,
                                  len(cp_entries),
                                  len(target_sources),
                                  timer.elapsed,
                                  False,
                                  'rsc'
                                  )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)

    def work_for_vts_rsc_jar_library(vts, ctx):

      cp_entries = []

      # Include the current machine's jdk lib jars. This'll blow up remotely.
      # We need a solution for that.
      # Probably something to do with https://github.com/pantsbuild/pants/pull/6346
      # TODO perhaps determine the platform of the jar and use that here.
      # https://github.com/pantsbuild/pants/issues/6547
      distribution = JvmPlatform.preferred_jvm_distribution([], strict=True)
      jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
      cp_entries.extend(jvm_lib_jars_abs)

      # TODO use compile_classpath
      classpath_abs = [
        path for (conf, path) in
        self.context.products.get_data('rsc_classpath').get_for_target(ctx.target)
      ]
      dependency_classpath = self._zinc.compile_classpath(
        'compile_classpath',
        ctx.target,
        extra_cp_entries=self._extra_compile_time_classpath)
      classpath_rel = fast_relpath_collection(classpath_abs)
      cp_entries.extend(classpath_rel)

      # NB: str.rjust requires a text fillchar on python 3 — b' ' raises TypeError.
      counter_val = str(counter()).rjust(counter.format_length(), ' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      self.context.log.info(
        counter_str,
        'Metacp-ing ',
        items_to_report_element(cp_entries, 'jar'),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        ctx.target.address.spec,
        ').')

      ctx.ensure_output_dirs_exist()

      tgt, = vts.targets
      with Timer() as timer:
      # Step 1: Convert classpath to SemanticDB
        # ---------------------------------------
        scalac_classpath_path_entries_abs = self.tool_classpath('workaround-metacp-dependency-classpath')
        scalac_classpath_path_entries = fast_relpath_collection(scalac_classpath_path_entries_abs)
        rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
        args = [
          '--verbose',
          # NB: Without this setting, rsc will be missing some symbols
          #     from the scala library.
          '--include-scala-library-synthetics', # TODO generate these once and cache them
          # NB: We need to add these extra dependencies in order to be able
          #     to find symbols used by the scalac jars.
          '--dependency-classpath', os.pathsep.join(dependency_classpath + scalac_classpath_path_entries),
          # NB: The directory to dump the semanticdb jars generated by metacp.
          '--out', rsc_index_dir,
          os.pathsep.join(cp_entries),
        ]
        metacp_wu = self._runtool(
          'scala.meta.cli.Metacp',
          'metacp',
          args,
          distribution,
          tgt=tgt,
          input_files=(scalac_classpath_path_entries + classpath_rel),
          output_dir=rsc_index_dir)
        metacp_stdout = stdout_contents(metacp_wu)
        metacp_result = json.loads(metacp_stdout)

        metai_classpath = self._collect_metai_classpath(
          metacp_result, classpath_rel, jvm_lib_jars_abs)

        # Step 1.5: metai Index the semanticdbs
        # -------------------------------------
        self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

        abs_output = [(conf, os.path.join(get_buildroot(), x))
                      for conf in self._confs for x in metai_classpath]

        self._metacp_jars_classpath_product.add_for_target(
          ctx.target,
          abs_output,
        )

      self._record_target_stats(tgt,
          len(abs_output),
          len([]),
          timer.elapsed,
          False,
          'metacp'
        )

    rsc_jobs = []
    zinc_jobs = []

    # Invalidated targets are a subset of relevant targets: get the context for this one.
    compile_target = ivts.target
    compile_context_pair = compile_contexts[compile_target]

    # Create the rsc job.
    # Currently, rsc only supports outlining scala.
    if self._only_zinc_compileable(compile_target):
      pass
    elif self._rsc_compilable(compile_target):
      rsc_key = self._rsc_key_for_target(compile_target)
      rsc_jobs.append(
        Job(
          rsc_key,
          functools.partial(
            work_for_vts_rsc,
            ivts,
            compile_context_pair[0]),
          [self._rsc_key_for_target(target) for target in invalid_dependencies],
          self._size_estimator(compile_context_pair[0].sources),
        )
      )
    elif self._metacpable(compile_target):
      rsc_key = self._rsc_key_for_target(compile_target)
      rsc_jobs.append(
        Job(
          rsc_key,
          functools.partial(
            work_for_vts_rsc_jar_library,
            ivts,
            compile_context_pair[0]),
          [self._rsc_key_for_target(target) for target in invalid_dependencies],
          self._size_estimator(compile_context_pair[0].sources),
          on_success=ivts.update,
          on_failure=ivts.force_invalidate,
        )
      )
    else:
      raise TaskError("Unexpected target for rsc compile {} with type {}"
        .format(compile_target, type(compile_target)))

    # Create the zinc compile jobs.
    # - Scala zinc compile jobs depend on the results of running rsc on the scala target.
    # - Java zinc compile jobs depend on the zinc compiles of their dependencies, because we can't
    #   generate mjars that make javac happy at this point.

    invalid_dependencies_without_jar_metacps = [t for t in invalid_dependencies
      if not self._metacpable(t)]
    if self._rsc_compilable(compile_target):
      full_key = self._compile_against_rsc_key_for_target(compile_target)
      zinc_jobs.append(
        Job(
          full_key,
          functools.partial(
            self._default_work_for_vts,
            ivts,
            compile_context_pair[1],
            'rsc_classpath',
            counter,
            compile_contexts,
            runtime_classpath_product),
          [
            self._rsc_key_for_target(compile_target)
          ] + [
            self._rsc_key_for_target(target)
            for target in invalid_dependencies_without_jar_metacps
          ],
          self._size_estimator(compile_context_pair[1].sources),
          # NB: right now, only the last job will write to the cache, because we don't
          #     do multiple cache entries per target-task tuple.
          on_success=ivts.update,
          on_failure=ivts.force_invalidate,
        )
      )
    elif self._only_zinc_compileable(compile_target):
      # write to both rsc classpath and runtime classpath
      class CompositeProductAdder(object):
        def __init__(self, runtime_classpath_product, rsc_classpath_product):
          self.rsc_classpath_product = rsc_classpath_product
          self.runtime_classpath_product = runtime_classpath_product

        def add_for_target(self, *args, **kwargs):
          self.runtime_classpath_product.add_for_target(*args, **kwargs)
          self.rsc_classpath_product.add_for_target(*args, **kwargs)

      full_key = self._compile_against_rsc_key_for_target(compile_target)
      zinc_jobs.append(
        Job(
          full_key,
          functools.partial(
            self._default_work_for_vts,
            ivts,
            compile_context_pair[1],
            'runtime_classpath',
            counter,
            compile_contexts,
            CompositeProductAdder(
              runtime_classpath_product,
              self.context.products.get_data('rsc_classpath'))),
          [
            self._compile_against_rsc_key_for_target(target)
            for target in invalid_dependencies_without_jar_metacps],
          self._size_estimator(compile_context_pair[1].sources),
          # NB: right now, only the last job will write to the cache, because we don't
          #     do multiple cache entries per target-task tuple.
          on_success=ivts.update,
          on_failure=ivts.force_invalidate,
        )
      )

    return rsc_jobs + zinc_jobs

  def select_runtime_context(self, ccs):
    # ccs is the [rsc_context, zinc_context] pair built by create_compile_context;
    # the zinc context holds the runtime (classfile) outputs.
    return ccs[1]

  def create_compile_context(self, target, target_workdir):
    """Return the [RscCompileContext, CompileContext] pair for ``target``.

    Both contexts share one computed sources list; rsc outputs live under
    rsc/ and zinc outputs under zinc/ in the target workdir.
    """
    # workdir layout:
    # rsc/
    #   - index/   -- metacp results
    #   - outline/ -- semanticdbs for the current target as created by rsc
    #   - m.jar    -- reified scala signature jar
    # zinc/
    #   - classes/   -- class files
    #   - z.analysis -- zinc analysis for the target
    #   - z.jar      -- final jar for the target
    #   - zinc_args  -- file containing the used zinc args
    sources = self._compute_sources_for_target(target)
    rsc_dir = os.path.join(target_workdir, "rsc")
    zinc_dir = os.path.join(target_workdir, "zinc")
    return [
      RscCompileContext(
        target=target,
        analysis_file=None,
        classes_dir=None,
        jar_file=None,
        zinc_args_file=None,
        rsc_mjar_file=os.path.join(rsc_dir, 'm.jar'),
        log_dir=os.path.join(rsc_dir, 'logs'),
        sources=sources,
        rsc_index_dir=os.path.join(rsc_dir, 'index'),
        rsc_outline_dir=os.path.join(rsc_dir, 'outline'),
      ),
      CompileContext(
        target=target,
        analysis_file=os.path.join(zinc_dir, 'z.analysis'),
        classes_dir=ClasspathEntry(os.path.join(zinc_dir, 'classes'), None),
        jar_file=ClasspathEntry(os.path.join(zinc_dir, 'z.jar'), None),
        log_dir=os.path.join(zinc_dir, 'logs'),
        zinc_args_file=os.path.join(zinc_dir, 'zinc_args'),
        sources=sources,
      )
    ]

  def _runtool(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), output_dir=None):
    """Invoke a registered JVM tool, hermetically or via runjava.

    Returns the hermetic execution result, or the tool's workunit in the
    subprocess/nailgun case; raises TaskError on a non-zero exit.
    """
    if self.execution_strategy == self.HERMETIC:
      # TODO: accept input_digests as well as files.
      with self.context.new_workunit(tool_name):
        tool_classpath_abs = self.tool_classpath(tool_name)
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        # Snapshot the tool classpath plus the declared inputs so the hermetic
        # process sees exactly those files.
        pathglobs = list(tool_classpath)
        pathglobs.extend(input_files)
        root = PathGlobsAndRoot(
          PathGlobs(tuple(pathglobs)),
          text_type(get_buildroot()))

        tool_snapshots = self.context._scheduler.capture_snapshots((root,))
        input_files_directory_digest = tool_snapshots[0].directory_digest
        classpath_for_cmd = os.pathsep.join(tool_classpath)
        cmd = [
          distribution.java,
        ]
        cmd.extend(self.get_options().jvm_options)
        cmd.extend(['-cp', classpath_for_cmd])
        cmd.extend([main])
        cmd.extend(args)

        epr = ExecuteProcessRequest(
          argv=tuple(cmd),
          env=dict(),
          input_files=input_files_directory_digest,
          output_files=tuple(),
          output_directories=(output_dir,),
          timeout_seconds=15*60,
          description='run {} for {}'.format(tool_name, tgt)
        )
        res = self.context.execute_process_synchronously_without_raising(
          epr,
          self.name(),
          [WorkUnitLabel.TOOL])

        if res.exit_code != 0:
          raise TaskError(res.stderr)

        if output_dir:
          self.context._scheduler.materialize_directories((
            DirectoryToMaterialize(
              # NB the first element here is the root to materialize into, not the dir to snapshot
              text_type(get_buildroot()),
              res.output_directory_digest),
          ))
          # TODO drop a file containing the digest, named maybe output_dir.digest
        return res
    else:
      with self.context.new_workunit(tool_name) as wu:
        result = self.runjava(classpath=self.tool_classpath(tool_name),
                              main=main,
                              jvm_options=self.get_options().jvm_options,
                              args=args,
                              workunit_name=tool_name,
                              workunit_labels=[WorkUnitLabel.TOOL])
        if result != 0:
          raise TaskError('Running {} failed'.format(tool_name))
        runjava_wu = None
        for c in wu.children:
          # NB: compare by equality, not identity — `is` on strings only worked
          # by the accident of interning and is not a reliable name match.
          if c.name == tool_name:
            runjava_wu = c
            break
        if runjava_wu is None:
          raise Exception('couldnt find work unit for underlying execution')
        return runjava_wu

  def _run_metai_tool(self,
                      distribution,
                      metai_classpath,
                      rsc_index_dir,
                      tgt,
                      extra_input_files=()):
    """Run metai over a list of semanticdb entries, indexing them in place.

    ``extra_input_files`` are additional files to include in the hermetic
    input snapshot beyond the classpath entries themselves.
    """
    # TODO have metai write to a different spot than metacp
    # Currently, the metai step depends on the fact that materializing
    # ignores existing files. It should write the files to a different
    # location, either by providing inputs from a different location,
    # or invoking a script that does the copying
    args = [
      '--verbose',
      os.pathsep.join(metai_classpath)
    ]
    self._runtool(
      'scala.meta.cli.Metai',
      'metai',
      args,
      distribution,
      tgt=tgt,
      input_files=tuple(metai_classpath) + tuple(extra_input_files),
      output_dir=rsc_index_dir
    )

  def _collect_metai_classpath(self, metacp_result, classpath_rel, jvm_lib_jars_abs):
    """Assemble the list of semanticdb entries metacp produced, in order:
    the scala-library synthetics, then one entry per relative classpath
    element, then one per absolute JDK lib jar."""

    def desandbox(path):
      # TODO come up with a cleaner way to maybe relpath paths.
      try:
        path = fast_relpath(path, get_buildroot())
      except Exception:
        pass
      # Strip any hermetic-execution sandbox prefix from the path.
      sandbox_prefix = 'process-execution[^{}]+/'.format(re.escape(os.path.sep))
      return re.split(sandbox_prefix, path)[-1]

    # NB The json is absolute pathed pointing into either the buildroot or
    #    the temp directory of the hermetic build. This relativizes the keys.
    statuses = {desandbox(k): v for k, v in metacp_result["status"].items()}

    # TODO when these are generated once, we won't need to collect them here.
    collected = [desandbox(metacp_result["scalaLibrarySynthetics"])]
    collected.extend(desandbox(statuses[entry]) for entry in classpath_rel)
    collected.extend(desandbox(statuses[entry]) for entry in jvm_lib_jars_abs)
    return collected
예제 #35
0
class RscCompile(ZincCompile):
  """Compile Scala and Java code to classfiles using Rsc."""

  _name = 'rsc' # noqa
  compiler_name = 'rsc'

  def __init__(self, *args, **kwargs):
    """Initialize the task and the product that tracks metacp outputs per target."""
    super(RscCompile, self).__init__(*args, **kwargs)
    # Classpath product holding the semanticdb entries produced by metacp,
    # keyed by target; populated as metacp jobs complete.
    self._metacp_jars_classpath_product = ClasspathProducts(self.get_options().pants_workdir)

  @classmethod
  def implementation_version(cls):
    """Version stamp; bump to invalidate results cached by earlier task versions."""
    inherited_version = super(RscCompile, cls).implementation_version()
    return inherited_version + [('RscCompile', 171)]

  @classmethod
  def register_options(cls, register):
    """Register the rsc, metacp and metai JVM tools used by this task."""
    super(RscCompile, cls).register_options(register)

    rsc_toolchain_version = '0.0.0-446-c64e6937'
    scalameta_toolchain_version = '4.0.0'

    # (tool name, jar coordinate, package to shade away from the tool classpath).
    tool_specs = [
      ('rsc',
       JarDependency(org='com.twitter', name='rsc_2.11', rev=rsc_toolchain_version),
       'rsc'),
      ('metacp',
       JarDependency(org='org.scalameta', name='metacp_2.11', rev=scalameta_toolchain_version),
       'scala'),
      ('metai',
       JarDependency(org='org.scalameta', name='metai_2.11', rev=scalameta_toolchain_version),
       'scala'),
    ]
    for tool_name, tool_jar, excluded_package in tool_specs:
      cls.register_jvm_tool(
        register,
        tool_name,
        classpath=[tool_jar],
        custom_rules=[
          Shader.exclude_package(excluded_package, recursive=True),
        ])

  # TODO: allow @memoized_method to convert lists into tuples so they can be hashed!
  @memoized_property
  def _nailgunnable_combined_classpath(self):
    """Register all of the component tools of the rsc compile task as a "combined" jvm tool.

    This allows us to invoke their combined classpath in a single nailgun instance (see #7089 and
    #7092). We still invoke their classpaths separately when not using nailgun, however.
    """
    combined = [
      entry
      for tool in ('rsc', 'metai', 'metacp')
      for entry in self.tool_classpath(tool)
    ]
    # Add zinc's classpath so that it can be invoked from the same nailgun instance.
    combined.extend(super(RscCompile, self).get_zinc_compiler_classpath())
    return combined

  # Overrides the normal zinc compiler classpath, which only contains zinc.
  def get_zinc_compiler_classpath(self):
    """Return the zinc classpath, augmented with our tools when running under nailgun."""
    def zinc_only():
      return super(RscCompile, self).get_zinc_compiler_classpath()

    return self.do_for_execution_strategy_variant({
      self.HERMETIC: zinc_only,
      self.SUBPROCESS: zinc_only,
      self.NAILGUN: lambda: self._nailgunnable_combined_classpath,
    })

  def register_extra_products_from_contexts(self, targets, compile_contexts):
    """Add the outputs of each target's compile to the relevant classpath products.

    Java targets contribute their zinc jar to 'rsc_classpath'; scala targets
    contribute their rsc mjar; jar libraries contribute their metacp index
    outputs to the internal metacp product.
    """
    super(RscCompile, self).register_extra_products_from_contexts(targets, compile_contexts)
    # Build a PathGlobsAndRoot that captures a single file/dir relative to the buildroot.
    def pathglob_for(filename):
      return PathGlobsAndRoot(
        PathGlobs(
          (fast_relpath_optional(filename, get_buildroot()),)),
        text_type(get_buildroot()))

    # Convert paths to ClasspathEntry objects, reusing previously-dumped digests
    # where available and capturing snapshots for the rest.
    def to_classpath_entries(paths, scheduler):
      # list of path ->
      # list of (path, optional<digest>) ->
      path_and_digests = [(p, load_digest(os.path.dirname(p))) for p in paths]
      # partition: list of path, list of tuples
      paths_without_digests = [p for (p, d) in path_and_digests if not d]
      if paths_without_digests:
        self.context.log.debug('Expected to find digests for {}, capturing them.'
                               .format(paths_without_digests))
      paths_with_digests = [(p, d) for (p, d) in path_and_digests if d]
      # list of path -> list path, captured snapshot -> list of path with digest
      snapshots = scheduler.capture_snapshots(tuple(pathglob_for(p) for p in paths_without_digests))
      captured_paths_and_digests = [(p, s.directory_digest)
                                    for (p, s) in zip(paths_without_digests, snapshots)]
      # merge and classpath ify
      return [ClasspathEntry(p, d) for (p, d) in paths_with_digests + captured_paths_and_digests]

    # Pair each entry with every registered conf.
    def confify(entries):
      return [(conf, e) for e in entries for conf in self._confs]

    for target in targets:
      rsc_cc, compile_cc = compile_contexts[target]
      if self._only_zinc_compilable(target):
        self.context.products.get_data('rsc_classpath').add_for_target(
          compile_cc.target,
          confify([compile_cc.jar_file])
        )
      elif self._rsc_compilable(target):
        self.context.products.get_data('rsc_classpath').add_for_target(
          rsc_cc.target,
          confify(to_classpath_entries([rsc_cc.rsc_mjar_file], self.context._scheduler)))
      elif self._metacpable(target):
        # Walk the metacp results dir and add classpath entries for all the files there.
        # TODO exercise this with a test.
        # TODO, this should only list the files/directories in the first directory under the index dir

        elements_in_index_dir = [os.path.join(rsc_cc.rsc_index_dir, s)
                                 for s in os.listdir(rsc_cc.rsc_index_dir)]

        entries = to_classpath_entries(elements_in_index_dir, self.context._scheduler)
        self._metacp_jars_classpath_product.add_for_target(
          rsc_cc.target, confify(entries))
      else:
        pass

  def _metacpable(self, target):
    """True for targets whose classpath entries are processed by metacp (jar libraries)."""
    return isinstance(target, JarLibrary)

  def _rsc_compilable(self, target):
    """True for pure-scala targets; rsc outlining is used only when no java sources exist."""
    return target.has_sources('.scala') and not target.has_sources('.java')

  def _only_zinc_compilable(self, target):
    """True for targets with java sources, which are compiled by zinc rather than rsc."""
    return target.has_sources('.java')

  def _is_scala_core_library(self, target):
    """True for the scala library targets, which need scala-library synthetics from metacp."""
    return target.address.spec in ('//:scala-library', '//:scala-library-synthetic')

  def create_empty_extra_products(self):
    """Ensure the 'rsc_classpath' product exists, seeded from 'compile_classpath'."""
    super(RscCompile, self).create_empty_extra_products()

    compile_classpath = self.context.products.get_data('compile_classpath')
    rsc_classpath = self.context.products.get_data('rsc_classpath')
    if rsc_classpath:
      rsc_classpath.update(compile_classpath)
    else:
      # Passing an init function seeds the product with a copy of compile_classpath.
      self.context.products.get_data('rsc_classpath', compile_classpath.copy)

  def select(self, target):
    """Select the targets this task compiles.

    Require that targets are marked for JVM compilation, to differentiate from
    targets owned by the scalajs contrib module.
    """
    if self._metacpable(target):
      return True
    return isinstance(target, JvmTarget) and (
      self._only_zinc_compilable(target) or self._rsc_compilable(target))

  def _rsc_key_for_target(self, compile_target):
    """Name of the job that produces the rsc-consumable output for a target."""
    if self._rsc_compilable(compile_target):
      return "rsc({})".format(compile_target.address.spec)
    if self._only_zinc_compilable(compile_target) or self._metacpable(compile_target):
      # rsc outlining with java dependencies depends on the output of a second metacp job.
      return self._metacp_key_for_target(compile_target)
    raise TaskError('unexpected target for compiling with rsc .... {}'.format(compile_target))

  def _metacp_dep_key_for_target(self, compile_target):
    """Name of the job a metacp job should wait on for a given dependency target."""
    if self._rsc_compilable(compile_target):
      return self._compile_against_rsc_key_for_target(compile_target)
    if self._only_zinc_compilable(compile_target) or self._metacpable(compile_target):
      # rsc outlining with java dependencies depends on the output of a second metacp job.
      return self._metacp_key_for_target(compile_target)
    raise TaskError('unexpected target for compiling with rsc .... {}'.format(compile_target))

  def _metacp_key_for_target(self, compile_target):
    """Name of the metacp job for a target."""
    return "metacp(%s)" % compile_target.address.spec

  def _compile_against_rsc_key_for_target(self, compile_target):
    """Name of the zinc job that compiles a target against rsc outputs."""
    return "compile_against_rsc(%s)" % compile_target.address.spec

  def pre_compile_jobs(self, counter):
    """Create jobs that run before any per-target compiles.

    Returns a single 'metacp(jdk)' job that metacp's the JDK library jars and
    metai-indexes the results, once per run. The job stashes its results on
    self for use by later per-target jobs.
    """

    # Create a target for the jdk outlining so that it'll only be done once per run.
    target = Target('jdk', Address('', 'jdk'), self.context.build_graph)
    index_dir = os.path.join(self.versioned_workdir, '--jdk--', 'index')

    def work_for_vts_rsc_jdk():
      distribution = self._get_jvm_distribution()
      jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
      # Stashed on self for use by later metacp jobs in this run.
      self._jvm_lib_jars_abs = jvm_lib_jars_abs

      metacp_inputs = tuple(jvm_lib_jars_abs)

      counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      self.context.log.info(
        counter_str,
        'Metacp-ing ',
        items_to_report_element(metacp_inputs, 'jar'),
        ' in the jdk')

      # NB: Metacp doesn't handle the existence of possibly stale semanticdb jars,
      # so we explicitly clean the directory to keep it happy.
      safe_mkdir(index_dir, clean=True)

      with Timer() as timer:
        # Step 1: Convert classpath to SemanticDB
        # ---------------------------------------
        rsc_index_dir = fast_relpath(index_dir, get_buildroot())
        args = [
          '--verbose',
          # NB: The directory to dump the semanticdb jars generated by metacp.
          '--out', rsc_index_dir,
          os.pathsep.join(metacp_inputs),
        ]
        metacp_wu = self._runtool(
          'scala.meta.cli.Metacp',
          'metacp',
          args,
          distribution,
          tgt=target,
          input_files=tuple(
            # NB: no input files because the jdk is expected to exist on the system in a known
            #     location.
            #     Related: https://github.com/pantsbuild/pants/issues/6416
          ),
          output_dir=rsc_index_dir)
        metacp_stdout = stdout_contents(metacp_wu)
        metacp_result = json.loads(metacp_stdout)

        metai_classpath = self._collect_metai_classpath(metacp_result, jvm_lib_jars_abs)

        # Step 1.5: metai Index the semanticdbs
        # -------------------------------------
        self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt=target)

        # Stashed on self; consumed when assembling rsc classpaths for scala targets.
        self._jvm_lib_metacp_classpath = [os.path.join(get_buildroot(), x) for x in metai_classpath]

      self._record_target_stats(target,
        len(self._jvm_lib_metacp_classpath),
        len([]),
        timer.elapsed,
        False,
        'metacp'
      )

    return [
      Job(
        'metacp(jdk)',
        functools.partial(
          work_for_vts_rsc_jdk
        ),
        [],
        self._size_estimator([]),
      ),
    ]

  def create_compile_jobs(self,
                          compile_target,
                          compile_contexts,
                          invalid_dependencies,
                          ivts,
                          counter,
                          runtime_classpath_product):
    """Create the rsc/metacp and zinc Jobs for one invalid target.

    :param compile_contexts: mapping of target -> (rsc context, zinc context) pair.
    :param invalid_dependencies: the invalid targets this target depends on.
    :param ivts: the versioned target set being compiled; supplies the target and the
                 cache update/invalidate callbacks.
    :param counter: progress counter used in log messages.
    :param runtime_classpath_product: classpath product that zinc jobs populate.
    :returns: the list of rsc jobs followed by the zinc jobs.
    """

    # Outline the scala sources of the vts into a semanticdb mjar via rsc.
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target
      tgt, = vts.targets

      if not hit_cache:
        counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        # This does the following
        # - collect jar dependencies and metacp-classpath entries for them
        # - collect the non-java targets and their classpath entries
        # - break out java targets and their javac'd classpath entries
        # metacp
        # - metacp the java targets
        # rsc
        # - combine the metacp outputs for jars, previous scala targets and the java metacp
        #   classpath
        # - run Rsc on the current target with those as dependencies

        dependencies_for_target = list(
          DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

        jar_deps = [t for t in dependencies_for_target if isinstance(t, JarLibrary)]

        def is_java_compile_target(t):
          return isinstance(t, JavaLibrary) or t.has_sources('.java')
        java_deps = [t for t in dependencies_for_target
                     if is_java_compile_target(t)]
        non_java_deps = [t for t in dependencies_for_target
                         if not (is_java_compile_target(t)) and not isinstance(t, JarLibrary)]

        # Metacp results for jar and java deps, plus the jdk's metacp outputs.
        metacped_jar_classpath_abs = _paths_from_classpath(
          self._metacp_jars_classpath_product.get_for_targets(jar_deps + java_deps)
        )
        metacped_jar_classpath_abs.extend(self._jvm_lib_metacp_classpath)
        metacped_jar_classpath_rel = fast_relpath_collection(metacped_jar_classpath_abs)

        non_java_paths = _paths_from_classpath(
          self.context.products.get_data('rsc_classpath').get_for_targets(non_java_deps),
          collection_type=set)
        non_java_rel = fast_relpath_collection(non_java_paths)

        ctx.ensure_output_dirs_exist()

        distribution = self._get_jvm_distribution()
        with Timer() as timer:
          # Outline Scala sources into SemanticDB
          # ---------------------------------------------
          rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file, get_buildroot())

          # TODO remove non-rsc entries from non_java_rel in a better way
          rsc_semanticdb_classpath = metacped_jar_classpath_rel + \
                                     [j for j in non_java_rel if 'compile/rsc/' in j]
          target_sources = ctx.sources
          args = [
                   '-cp', os.pathsep.join(rsc_semanticdb_classpath),
                   '-d', rsc_mjar_file,
                 ] + target_sources
          sources_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            input_files=tuple(rsc_semanticdb_classpath),
            input_digest=sources_snapshot.directory_digest,
            output_dir=os.path.dirname(rsc_mjar_file))

        self._record_target_stats(tgt,
          len(rsc_semanticdb_classpath),
          len(target_sources),
          timer.elapsed,
          False,
          'rsc'
        )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)

    # Run metacp over the classpath entries of the vts' target (read from the
    # given classpath product) and metai-index the results.
    def work_for_vts_metacp(vts, ctx, classpath_product_key):
      metacp_dependencies_entries = self._zinc.compile_classpath_entries(
        classpath_product_key,
        ctx.target,
        extra_cp_entries=self._extra_compile_time_classpath)

      metacp_dependencies = fast_relpath_collection(c.path for c in metacp_dependencies_entries)


      # Partition entries by whether a digest is already known; undigested paths
      # are passed as plain input files instead.
      metacp_dependencies_digests = [c.directory_digest for c in metacp_dependencies_entries
                                     if c.directory_digest]
      metacp_dependencies_paths_without_digests = fast_relpath_collection(
        c.path for c in metacp_dependencies_entries if not c.directory_digest)

      classpath_entries = [
        cp_entry for (conf, cp_entry) in
        self.context.products.get_data(classpath_product_key).get_classpath_entries_for_targets(
          [ctx.target])
      ]
      classpath_digests = [c.directory_digest for c in classpath_entries if c.directory_digest]
      classpath_paths_without_digests = fast_relpath_collection(
        c.path for c in classpath_entries if not c.directory_digest)

      classpath_abs = [c.path for c in classpath_entries]
      classpath_rel = fast_relpath_collection(classpath_abs)

      metacp_inputs = []
      metacp_inputs.extend(classpath_rel)

      counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      self.context.log.info(
        counter_str,
        'Metacp-ing ',
        items_to_report_element(metacp_inputs, 'jar'),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        ctx.target.address.spec,
        ').')

      ctx.ensure_output_dirs_exist()

      tgt, = vts.targets
      with Timer() as timer:
        # Step 1: Convert classpath to SemanticDB
        # ---------------------------------------
        rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
        args = [
          '--verbose',
          '--stub-broken-signatures',
          '--dependency-classpath', os.pathsep.join(
            metacp_dependencies +
            fast_relpath_collection(self._jvm_lib_jars_abs)
          ),
          # NB: The directory to dump the semanticdb jars generated by metacp.
          '--out', rsc_index_dir,
          os.pathsep.join(metacp_inputs),
        ]

        # NB: If we're building a scala library jar,
        #     also request that metacp generate the indices
        #     for the scala synthetics.
        if self._is_scala_core_library(tgt):
          args = [
            '--include-scala-library-synthetics',
          ] + args
        distribution = self._get_jvm_distribution()

        input_digest = self.context._scheduler.merge_directories(
          tuple(classpath_digests + metacp_dependencies_digests))

        metacp_wu = self._runtool(
          'scala.meta.cli.Metacp',
          'metacp',
          args,
          distribution,
          tgt=tgt,
          input_digest=input_digest,
          input_files=tuple(classpath_paths_without_digests +
                            metacp_dependencies_paths_without_digests),
          output_dir=rsc_index_dir)
        metacp_result = json.loads(stdout_contents(metacp_wu))

        metai_classpath = self._collect_metai_classpath(metacp_result, classpath_rel)

        # Step 1.5: metai Index the semanticdbs
        # -------------------------------------
        self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

        abs_output = [(conf, os.path.join(get_buildroot(), x))
                      for conf in self._confs for x in metai_classpath]

        self._metacp_jars_classpath_product.add_for_target(
          ctx.target,
          abs_output,
        )

      self._record_target_stats(tgt,
          len(abs_output),
          len([]),
          timer.elapsed,
          False,
          'metacp'
        )

    rsc_jobs = []
    zinc_jobs = []

    # Invalidated targets are a subset of relevant targets: get the context for this one.
    compile_target = ivts.target
    compile_context_pair = compile_contexts[compile_target]

    # Create the rsc job.
    # Currently, rsc only supports outlining scala.
    if self._only_zinc_compilable(compile_target):
      pass
    elif self._rsc_compilable(compile_target):
      rsc_key = self._rsc_key_for_target(compile_target)
      rsc_jobs.append(
        Job(
          rsc_key,
          functools.partial(
            work_for_vts_rsc,
            ivts,
            compile_context_pair[0]),
          [self._rsc_key_for_target(target) for target in invalid_dependencies] + ['metacp(jdk)'],
          self._size_estimator(compile_context_pair[0].sources),
        )
      )
    elif self._metacpable(compile_target):
      rsc_key = self._rsc_key_for_target(compile_target)
      rsc_jobs.append(
        Job(
          rsc_key,
          functools.partial(
            work_for_vts_metacp,
            ivts,
            compile_context_pair[0],
            'compile_classpath'),
          [self._rsc_key_for_target(target) for target in invalid_dependencies] + ['metacp(jdk)'],
          self._size_estimator(compile_context_pair[0].sources),
          on_success=ivts.update,
          on_failure=ivts.force_invalidate,
        )
      )
    else:
      raise TaskError("Unexpected target for rsc compile {} with type {}"
        .format(compile_target, type(compile_target)))

    # Create the zinc compile jobs.
    # - Scala zinc compile jobs depend on the results of running rsc on the scala target.
    # - Java zinc compile jobs depend on the zinc compiles of their dependencies, because we can't
    #   generate mjars that make javac happy at this point.

    invalid_dependencies_without_jar_metacps = [t for t in invalid_dependencies
      if not self._metacpable(t)]
    if self._rsc_compilable(compile_target):
      full_key = self._compile_against_rsc_key_for_target(compile_target)
      zinc_jobs.append(
        Job(
          full_key,
          functools.partial(
            self._default_work_for_vts,
            ivts,
            compile_context_pair[1],
            'rsc_classpath',
            counter,
            compile_contexts,
            runtime_classpath_product),
          [
            self._rsc_key_for_target(compile_target)
          ] + [
            self._rsc_key_for_target(target)
            for target in invalid_dependencies_without_jar_metacps
          ] + [
            'metacp(jdk)'
          ],
          self._size_estimator(compile_context_pair[1].sources),
          # NB: right now, only the last job will write to the cache, because we don't
          #     do multiple cache entries per target-task tuple.
          on_success=ivts.update,
          on_failure=ivts.force_invalidate,
        )
      )
    elif self._only_zinc_compilable(compile_target):
      # write to both rsc classpath and runtime classpath
      class CompositeProductAdder(object):
        def __init__(self, runtime_classpath_product, rsc_classpath_product):
          self.rsc_classpath_product = rsc_classpath_product
          self.runtime_classpath_product = runtime_classpath_product

        def add_for_target(self, *args, **kwargs):
          self.runtime_classpath_product.add_for_target(*args, **kwargs)
          self.rsc_classpath_product.add_for_target(*args, **kwargs)

      zinc_key = self._compile_against_rsc_key_for_target(compile_target)
      zinc_jobs.append(
        Job(
          zinc_key,
          functools.partial(
            self._default_work_for_vts,
            ivts,
            compile_context_pair[1],
            'runtime_classpath',
            counter,
            compile_contexts,
            CompositeProductAdder(
              runtime_classpath_product,
              self.context.products.get_data('rsc_classpath'))),
          [
            self._compile_against_rsc_key_for_target(target)
            for target in invalid_dependencies_without_jar_metacps],
          self._size_estimator(compile_context_pair[1].sources),
          on_failure=ivts.force_invalidate,
        )
      )

      # After the zinc compile of a java target, metacp its output so scala
      # dependees can consume it via rsc.
      metacp_key = self._metacp_key_for_target(compile_target)
      rsc_jobs.append(
        Job(
          metacp_key,
          functools.partial(
            work_for_vts_metacp,
            ivts,
            compile_context_pair[0],
            'runtime_classpath'),
            [self._metacp_dep_key_for_target(target) for target in invalid_dependencies] + [
              'metacp(jdk)',
              zinc_key,
            ],
          self._size_estimator(compile_context_pair[0].sources),
          on_success=ivts.update,
          on_failure=ivts.force_invalidate,
        )
      )

    return rsc_jobs + zinc_jobs

  def select_runtime_context(self, ccs):
    """Return the zinc CompileContext (the second of the (rsc, zinc) pair)."""
    return ccs[1]

  def create_compile_context(self, target, target_workdir):
    """Create the (rsc, zinc) compile context pair for a target.

    workdir layout:
    rsc/
      - index/   -- metacp results
      - outline/ -- semanticdbs for the current target as created by rsc
      - m.jar    -- reified scala signature jar
    zinc/
      - classes/   -- class files
      - z.analysis -- zinc analysis for the target
      - z.jar      -- final jar for the target
      - zinc_args  -- file containing the used zinc args
    """
    target_sources = self._compute_sources_for_target(target)
    rsc_workdir = os.path.join(target_workdir, "rsc")
    zinc_workdir = os.path.join(target_workdir, "zinc")

    rsc_context = RscCompileContext(
      target=target,
      analysis_file=None,
      classes_dir=None,
      jar_file=None,
      zinc_args_file=None,
      rsc_mjar_file=os.path.join(rsc_workdir, 'm.jar'),
      log_dir=os.path.join(rsc_workdir, 'logs'),
      sources=target_sources,
      rsc_index_dir=os.path.join(rsc_workdir, 'index'),
    )
    zinc_context = CompileContext(
      target=target,
      analysis_file=os.path.join(zinc_workdir, 'z.analysis'),
      classes_dir=ClasspathEntry(os.path.join(zinc_workdir, 'classes'), None),
      jar_file=ClasspathEntry(os.path.join(zinc_workdir, 'z.jar'), None),
      log_dir=os.path.join(zinc_workdir, 'logs'),
      zinc_args_file=os.path.join(zinc_workdir, 'zinc_args'),
      sources=target_sources,
    )
    return [rsc_context, zinc_context]

  def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    """Run a JVM tool hermetically via the engine's process execution.

    :param main: the tool's main class name.
    :param tool_name: registered tool name, used to look up the tool classpath.
    :param args: command line arguments for the tool.
    :param distribution: the jvm distribution whose `java` binary is invoked.
    :param tgt: the target the invocation is for (used in the process description).
    :param input_files: paths to capture as snapshot inputs for the process.
    :param input_digest: an already-captured digest to merge into the inputs.
    :param output_dir: directory whose outputs are captured and materialized.
    :raises TaskError: if the process exits non-zero.
    :returns: the process execution result.
    """
    tool_classpath_abs = self.tool_classpath(tool_name)
    tool_classpath = fast_relpath_collection(tool_classpath_abs)

    classpath_for_cmd = os.pathsep.join(tool_classpath)
    cmd = [
      distribution.java,
    ]
    cmd.extend(self.get_options().jvm_options)
    cmd.extend(['-cp', classpath_for_cmd])
    cmd.append(main)
    cmd.extend(args)

    pathglobs = list(tool_classpath)
    pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

    # BUGFIX: initialize so the variable is bound even when there is nothing to
    # glob (previously an UnboundLocalError when pathglobs was empty).
    path_globs_input_digest = None
    if pathglobs:
      # Don't capture a snapshot if pathglobs is empty.
      root = PathGlobsAndRoot(
        PathGlobs(tuple(pathglobs)),
        text_type(get_buildroot()))
      path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

    if path_globs_input_digest and input_digest:
      epr_input_files = self.context._scheduler.merge_directories(
        (path_globs_input_digest, input_digest))
    else:
      epr_input_files = path_globs_input_digest or input_digest

    epr = ExecuteProcessRequest(
      argv=tuple(cmd),
      input_files=epr_input_files,
      output_files=tuple(),
      output_directories=(output_dir,),
      timeout_seconds=15*60,
      description='run {} for {}'.format(tool_name, tgt),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_without_raising(
      epr,
      self.name(),
      [WorkUnitLabel.TOOL])

    if res.exit_code != 0:
      raise TaskError(res.stderr)

    if output_dir:
      # Persist the output digest so later runs can reuse it without re-snapshotting.
      dump_digest(output_dir, res.output_directory_digest)
      self.context._scheduler.materialize_directories((
        DirectoryToMaterialize(
          # NB the first element here is the root to materialize into, not the dir to snapshot
          text_type(get_buildroot()),
          res.output_directory_digest),
      ))
      # TODO drop a file containing the digest, named maybe output_dir.digest
    return res

  # The classpath is parameterized so that we can have a single nailgun instance serving all of our
  # execution requests.
  def _runtool_nonhermetic(self, parent_workunit, classpath, main, tool_name, args, distribution):
    """Run a JVM tool via runjava and return the workunit of the underlying execution.

    :raises TaskError: if the tool exits non-zero.
    :raises Exception: if the tool's workunit cannot be found under the parent.
    """
    result = self.runjava(classpath=classpath,
                          main=main,
                          jvm_options=self.get_options().jvm_options,
                          args=args,
                          workunit_name=tool_name,
                          workunit_labels=[WorkUnitLabel.TOOL],
                          dist=distribution
    )
    if result != 0:
      raise TaskError('Running {} failed'.format(tool_name))
    runjava_workunit = None
    for c in parent_workunit.children:
      # BUGFIX: compare strings by equality, not identity -- `is` only worked
      # by accident of string interning.
      if c.name == tool_name:
        runjava_workunit = c
        break
    # TODO: figure out and document when would this happen.
    if runjava_workunit is None:
      raise Exception('couldnt find work unit for underlying execution')
    return runjava_workunit

  def _runtool(self, main, tool_name, args, distribution,
               tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    """Dispatch a tool invocation to the hermetic or non-hermetic runner."""
    with self.context.new_workunit(tool_name) as wu:
      def run_hermetic():
        return self._runtool_hermetic(
          main, tool_name, args, distribution,
          tgt=tgt, input_files=input_files, input_digest=input_digest, output_dir=output_dir)

      def run_subprocess():
        return self._runtool_nonhermetic(
          wu, self.tool_classpath(tool_name), main, tool_name, args, distribution)

      def run_nailgun():
        return self._runtool_nonhermetic(
          wu, self._nailgunnable_combined_classpath, main, tool_name, args, distribution)

      return self.do_for_execution_strategy_variant({
        self.HERMETIC: run_hermetic,
        self.SUBPROCESS: run_subprocess,
        self.NAILGUN: run_nailgun,
      })

  def _run_metai_tool(self,
                      distribution,
                      metai_classpath,
                      rsc_index_dir,
                      tgt,
                      extra_input_files=()):
    """Index the given semanticdb classpath entries with metai.

    TODO have metai write to a different spot than metacp.
    Currently, the metai step depends on the fact that materializing
    ignores existing files. It should write the files to a different
    location, either by providing inputs from a different location,
    or invoking a script that does the copying.
    """
    metai_inputs = tuple(metai_classpath) + tuple(extra_input_files)
    self._runtool(
      'scala.meta.cli.Metai',
      'metai',
      ['--verbose', os.pathsep.join(metai_classpath)],
      distribution,
      tgt=tgt,
      input_files=metai_inputs,
      output_dir=rsc_index_dir)

  def _collect_metai_classpath(self, metacp_result, relative_input_paths):
    """Build the list of semanticdb paths that metai should index from a metacp result.

    :param metacp_result: parsed json output of a metacp invocation.
    :param relative_input_paths: buildroot-relative classpath entries metacp was run on.
    :returns: the semanticdb path for each input, plus scala-library synthetics if any.
    """
    relative_workdir = fast_relpath(
      self.context.options.for_global_scope().pants_workdir,
      get_buildroot())

    # NB The json uses absolute paths pointing into either the buildroot or
    #    the temp directory of the hermetic build. This relativizes the keys.
    #    TODO remove this after https://github.com/scalameta/scalameta/issues/1791 is released
    known_path_patterns = [
      os.path.join(relative_workdir, 'resolve', 'coursier', '[^/]*', 'cache', '.*'),
      os.path.join(relative_workdir, 'resolve', 'ivy', '[^/]*', 'ivy', 'jars', '.*'),
      os.path.join(relative_workdir, 'compile', 'rsc', '.*'),
      os.path.join(relative_workdir, r'\.jdk', '.*'),
      os.path.join(r'\.jdk', '.*'),
    ]
    desandboxify = _create_desandboxify_fn(known_path_patterns)

    status_elements = {
      desandboxify(key): desandboxify(value)
      for key, value in metacp_result["status"].items()
    }

    metai_classpath = [status_elements[entry] for entry in relative_input_paths]

    scala_lib_synthetics = metacp_result["scalaLibrarySynthetics"]
    if scala_lib_synthetics:
      metai_classpath.append(desandboxify(scala_lib_synthetics))

    return metai_classpath

  def _get_jvm_distribution(self):
    """Pick the jvm distribution used to run the tools.

    TODO We may want to use different jvm distributions depending on what
    java version the target expects to be compiled against.
    See: https://github.com/pantsbuild/pants/issues/6416 for covering using
         different jdks in remote builds.
    """
    local_distribution = JvmPlatform.preferred_jvm_distribution([], strict=True)
    remote_hermetic = (self.execution_strategy == self.HERMETIC
                       and self.get_options().remote_execution_server)
    if not remote_hermetic:
      return local_distribution

    class HermeticDistribution(object):
      """Wraps a local distribution, rewriting its paths under the sandbox jdk home."""

      def __init__(self, home_path, distribution):
        self._underlying = distribution
        self._home = home_path

      def find_libs(self, names):
        return [self._rehome(lib) for lib in self._underlying.find_libs(names)]

      @property
      def java(self):
        return os.path.join(self._home, 'bin', 'java')

      def _rehome(self, lib):
        return os.path.join(self._home, lib[len(self._underlying.home)+1:])

    return HermeticDistribution('.jdk', local_distribution)

  def _on_invalid_compile_dependency(self, dep, compile_target):
    """Decide whether to continue searching for invalid targets to use in the execution graph.

    If a necessary dep is a Scala dep and the root is Java, continue to recurse because
    otherwise we'll drop the path between Zinc compile of the Java target and a Zinc
    compile of a transitive Scala dependency.

    This is only an issue for graphs like J -> S1 -> S2, where J is a Java target,
    S1/2 are Scala targets and S2 must be on the classpath to compile J successfully.
    """
    if dep.has_sources('.scala') and compile_target.has_sources('.java'):
      return True
    else:
      return False
  def test_create_canonical_classpath(self):
    """Verify canonical classpath symlinks and manifest, including incremental updates.

    Runs the helper three times against the same `base_dir`: first with a jar, a
    resources dir and a resolved 3rdparty jar; then after deleting the resource
    dependency; then after adding a second jar. All three runs must happen inside the
    `temporary_dir` context — `base_dir` is deleted when the context exits, so the
    incremental cases would otherwise operate on a removed directory.
    """
    a = self.make_target('a/b', JvmTarget)

    jar_path = 'ivy/jars/org.x/lib/x-1.0.jar'
    classpath_products = ClasspathProducts(self.pants_workdir)

    resolved_jar = ResolvedJar(M2Coordinate(org='org', name='x', rev='1.0'),
                               cache_path='somewhere',
                               pants_path=self._create_file(jar_path))

    classpath_products.add_for_target(a, [('default', self._create_file('a.jar')),
                                          ('default', self._create_file('resources'))])
    classpath_products.add_jars_for_targets([a], 'default', [resolved_jar])

    with temporary_dir() as base_dir:
      self._test_canonical_classpath_helper(classpath_products,
                                            [a],
                                            base_dir,
                                            [
                                              'a.b.b-0.jar',
                                              'a.b.b-1',
                                              'a.b.b-2.jar',
                                            ],
                                            {
                                              'a.b.b-classpath.txt':
                                                '{}/a.jar:{}/resources:{}/{}\n'
                                            .format(self.pants_workdir, self.pants_workdir,
                                                    self.pants_workdir, jar_path)
                                            },
                                            excludes={Exclude(org='org', name='y')})

      # incrementally delete the resource dependency
      classpath_products = ClasspathProducts(self.pants_workdir)
      classpath_products.add_for_target(a, [('default', self._create_file('a.jar'))])
      self._test_canonical_classpath_helper(classpath_products,
                                            [a],
                                            base_dir,
                                            [
                                              'a.b.b-0.jar',
                                            ],
                                            {
                                              'a.b.b-classpath.txt':
                                                '{}/a.jar\n'.format(self.pants_workdir)
                                            })

      # incrementally add another jar dependency
      classpath_products = ClasspathProducts(self.pants_workdir)
      classpath_products.add_for_target(a, [('default', self._create_file('a.jar')),
                                            ('default', self._create_file('b.jar'))])
      self._test_canonical_classpath_helper(classpath_products,
                                            [a],
                                            base_dir,
                                            [
                                              'a.b.b-0.jar',
                                              'a.b.b-1.jar'
                                            ],
                                            {
                                              'a.b.b-classpath.txt':
                                                '{}/a.jar:{}/b.jar\n'.format(self.pants_workdir,
                                                                             self.pants_workdir)
                                            })