Example No. 1
  def _handle_duplicate_sources(self, vt, sources):
    """Handles duplicate sources generated by the given gen target by either failure or deletion.

    This method should be called after all dependencies have been injected into the graph, but
    before injecting the synthetic version of this target.

    Returns a boolean indicating whether it modified the underlying filesystem.

    NB(gm): Some code generators may re-generate code that their dependent libraries generate.
    This results in targets claiming to generate sources that they really don't, so we try to
    filter out sources that were actually generated by dependencies of the target. This causes
    the code generated by the dependencies to 'win' over the code generated by dependees. By
    default, this behavior is disabled, and duplication in generated sources will raise a
    TaskError. This is controlled by the --allow-dups flag.
    """
    target = vt.target
    target_workdir = vt.results_dir

    # Walk dependency gentargets and record any sources owned by those targets that are also
    # owned by this target.
    duplicates_by_target = OrderedDict()
    def record_duplicates(dep):
      if dep == target or not self.is_gentarget(dep.concrete_derived_from):
        return False
      duped_sources = [s for s in dep.sources_relative_to_source_root() if s in sources.files and
                       not self.ignore_dup(target, dep, s)]
      if duped_sources:
        duplicates_by_target[dep] = duped_sources
    target.walk(record_duplicates)

    # If there were no dupes, we're done.
    if not duplicates_by_target:
      return False

    # If there were duplicates, warn or error.
    messages = ['{target} generated sources that had already been generated by dependencies.'
                .format(target=target.address.spec)]
    for dep, duped_sources in duplicates_by_target.items():
      messages.append('\t{} also generated:'.format(dep.concrete_derived_from.address.spec))
      messages.extend(['\t\t{}'.format(source) for source in duped_sources])
    message = '\n'.join(messages)
    if self.get_options().allow_dups:
      logger.warning(message)
    else:
      raise self.DuplicateSourceError(message)

    did_modify = False

    # Finally, remove duplicates from the workdir. This prevents us from having to worry
    # about them during future incremental compiles.
    for dep, duped_sources in duplicates_by_target.items():
      for duped_source in duped_sources:
        safe_delete(os.path.join(target_workdir, duped_source))
        did_modify = True
    if did_modify:
      Digest.clear(vt.current_results_dir)
    return did_modify
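
The warn-or-fail policy described in the docstring above is self-contained enough to distill. Below is a minimal standalone sketch of that policy using only the stdlib; `check_duplicate_sources` and this `DuplicateSourceError` are hypothetical names, not the Pants API:

import logging

logger = logging.getLogger(__name__)

class DuplicateSourceError(Exception):
    pass

def check_duplicate_sources(target_spec, duplicates_by_dep, allow_dups):
    """`duplicates_by_dep` maps a dependency's spec to the sources it also generated."""
    if not duplicates_by_dep:
        return
    messages = [f"{target_spec} generated sources that had already been generated by dependencies."]
    for dep_spec, duped_sources in duplicates_by_dep.items():
        messages.append(f"\t{dep_spec} also generated:")
        messages.extend(f"\t\t{source}" for source in duped_sources)
    message = "\n".join(messages)
    if allow_dups:
        logger.warning(message)  # tolerate duplicates: dependency-generated code wins
    else:
        raise DuplicateSourceError(message)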
Example No. 2
def _double_check_cache_for_vts(self, vts, zinc_compile_context):
    # Double check the cache before beginning compilation
    if self.check_cache(vts):
        self.context.log.debug(f"Snapshotting results for {vts.target.address.spec}")
        classpath_entry = self._classpath_for_context(zinc_compile_context)
        relpath = fast_relpath(classpath_entry.path, get_buildroot())
        (classes_dir_snapshot,) = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(PathGlobs([relpath]), get_buildroot(), Digest.load(relpath))
        ])
        classpath_entry.hydrate_missing_directory_digest(classes_dir_snapshot.directory_digest)
        # Re-validate the vts!
        vts.update()
Example No. 3
def test_streaming_output_changed(caplog) -> None:
    caplog.set_level(logging.DEBUG)
    changed_digest = Digest(EMPTY_DIGEST.fingerprint, 2)
    changed_snapshot = Snapshot._unsafe_create(changed_digest, [], [])
    result = FmtResult(
        input=EMPTY_SNAPSHOT,
        output=changed_snapshot,
        stdout="stdout",
        stderr="stderr",
        formatter_name="formatter",
    )
    assert result.level() == LogLevel.WARN
    assert result.message() == "formatter made changes."
    assert ["Output from formatter\nstdout\nstderr"] == [
        rec.message for rec in caplog.records if rec.levelno == logging.DEBUG
    ]
Example No. 4
    def add_directory_digests_for_jars(self, targets_and_jars):
        """For each target, get DirectoryDigests for its jars and return them zipped with the jars.

    :param targets_and_jars: List of tuples of the form (Target, [pants.java.jar.jar_dependency_utils.ResolveJar])
    :return: list[tuple[(Target, list[pants.java.jar.jar_dependency_utils.ResolveJar])]
    """

        targets_and_jars = list(targets_and_jars)

        if not targets_and_jars:
            return targets_and_jars

        jar_paths = []
        for target, jars_to_snapshot in targets_and_jars:
            for jar in jars_to_snapshot:
                jar_paths.append(fast_relpath(jar.pants_path, get_buildroot()))

        # Capture Snapshots for jars, using an optional adjacent digest. Create the digest afterward
        # if it does not exist.
        snapshots = self.context._scheduler.capture_snapshots(
            tuple(
                PathGlobsAndRoot(
                    PathGlobs([jar]),
                    get_buildroot(),
                    Digest.load(jar),
                ) for jar in jar_paths))
        for snapshot, jar_path in zip(snapshots, jar_paths):
            snapshot.directory_digest.dump(jar_path)

        # We want to map the list[Snapshot] back onto targets_and_jars. We assume that
        # (1) the total number of ResolvedJars across all targets equals the number of
        # Snapshots, and that (2) capture_snapshots preserves ordering.
        digests = [snapshot.directory_digest for snapshot in snapshots]
        digest_iterator = iter(digests)

        snapshotted_targets_and_jars = []
        for target, jars_to_snapshot in targets_and_jars:
            snapshotted_jars = [
                ResolvedJar(coordinate=jar.coordinate,
                            cache_path=jar.cache_path,
                            pants_path=jar.pants_path,
                            directory_digest=next(digest_iterator))
                for jar in jars_to_snapshot
            ]
            snapshotted_targets_and_jars.append((target, snapshotted_jars))

        return snapshotted_targets_and_jars
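
Several of these examples repeat the same "optional adjacent digest" idiom: pass `Digest.load(path)` as a hint when capturing, then `dump` the resulting digest next to the path so later runs can skip re-hashing. A distilled sketch against the same v1 internals used above (`capture_with_adjacent_digests` is a hypothetical helper name):

def capture_with_adjacent_digests(scheduler, relpaths):
    # Digest.load returns a previously dumped digest if one is stored adjacent to the
    # path, which lets capture_snapshots avoid re-hashing unchanged outputs.
    snapshots = scheduler.capture_snapshots(tuple(
        PathGlobsAndRoot(PathGlobs([p]), get_buildroot(), Digest.load(p))
        for p in relpaths))
    for snapshot, p in zip(snapshots, relpaths):
        # Persist the digest next to the path so the next run finds it.
        snapshot.directory_digest.dump(p)
    return snapshots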
Example No. 5
    def register_extra_products_from_contexts(self, targets, compile_contexts):
        super().register_extra_products_from_contexts(targets,
                                                      compile_contexts)

        def confify(entries):
            return [(conf, e) for e in entries for conf in self._confs]

        # Ensure that the jar/rsc jar is on the rsc_mixed_compile_classpath.
        for target in targets:
            merged_cc = compile_contexts[target]
            zinc_cc = merged_cc.zinc_cc
            rsc_cc = merged_cc.rsc_cc
            # Make sure m.jar is digested if it exists when the target is validated.
            if rsc_cc.rsc_jar_file.directory_digest is None and os.path.exists(
                    rsc_cc.rsc_jar_file.path):
                relpath = fast_relpath(rsc_cc.rsc_jar_file.path,
                                       get_buildroot())
                (classes_dir_snapshot,) = self.context._scheduler.capture_snapshots([
                    PathGlobsAndRoot(
                        PathGlobs([relpath]),
                        get_buildroot(),
                        Digest.load(relpath),
                    ),
                ])
                rsc_cc.rsc_jar_file.hydrate_missing_directory_digest(
                    classes_dir_snapshot.digest)

            if rsc_cc.workflow is not None:
                cp_entries = match(
                    rsc_cc.workflow,
                    {
                        self.JvmCompileWorkflowType.zinc_only:
                            lambda: confify([self._classpath_for_context(zinc_cc)]),
                        self.JvmCompileWorkflowType.zinc_java:
                            lambda: confify([self._classpath_for_context(zinc_cc)]),
                        self.JvmCompileWorkflowType.rsc_and_zinc:
                            lambda: confify([rsc_cc.rsc_jar_file]),
                        self.JvmCompileWorkflowType.outline_and_zinc:
                            lambda: confify([rsc_cc.rsc_jar_file]),
                    },
                )()
                self.context.products.get_data("rsc_mixed_compile_classpath").add_for_target(
                    target, cp_entries)
Example No. 6
    def _capture_sources(self, vts):
        to_capture = []
        results_dirs = []
        filespecs = []

        for vt in vts:
            target = vt.target
            # Compute the (optional) subdirectory of the results_dir to generate code to. This
            # path will end up in the generated FilesetWithSpec and target, and thus needs to be
            # located below the stable/symlinked `vt.results_dir`.
            synthetic_target_dir = self.synthetic_target_dir(
                target, vt.results_dir)

            files = self.sources_globs

            results_dir_relpath = fast_relpath(synthetic_target_dir,
                                               get_buildroot())
            buildroot_relative_globs = tuple(
                os.path.join(results_dir_relpath, file) for file in files)
            buildroot_relative_excludes = tuple(
                os.path.join(results_dir_relpath, file)
                for file in self.sources_exclude_globs)
            to_capture.append(
                PathGlobsAndRoot(
                    PathGlobs(buildroot_relative_globs,
                              buildroot_relative_excludes),
                    text_type(get_buildroot()),
                    # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`.
                    Digest.load(vt.current_results_dir),
                ))
            results_dirs.append(results_dir_relpath)
            filespecs.append(
                FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

        snapshots = self.context._scheduler.capture_snapshots(
            tuple(to_capture))

        for snapshot, vt in zip(snapshots, vts):
            snapshot.directory_digest.dump(vt.current_results_dir)

        return tuple(
            EagerFilesetWithSpec(
                results_dir_relpath,
                filespec,
                snapshot,
            ) for (results_dir_relpath, filespec,
                   snapshot) in zip(results_dirs, filespecs, snapshots))
Example No. 7
def test_output_digest(rule_runner: RuleRunner) -> None:
    process = Process(
        argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
        description="echo roland",
        output_files=("roland", ),
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.output_digest == Digest(
        fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
        serialized_bytes_length=80,
    )

    digest_contents = rule_runner.request(DigestContents,
                                          [result.output_digest])
    assert digest_contents == DigestContents(
        [FileContent("roland", b"European Burmese", False)])
Example No. 8
def test_multiple_snapshots_from_outside_buildroot(
        rule_runner: RuleRunner) -> None:
    with temporary_dir() as temp_dir:
        Path(temp_dir, "roland").write_text("European Burmese")
        Path(temp_dir, "susannah").write_text("I don't know")
        snapshots = rule_runner.scheduler.capture_snapshots([
            PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
            PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
            PathGlobsAndRoot(PathGlobs(["doesnotexist"]), temp_dir),
        ])
    assert len(snapshots) == 3
    assert snapshots[0].files == ("roland", )
    assert snapshots[0].digest == ROLAND_DIGEST
    assert snapshots[1].files == ("susannah", )
    assert snapshots[1].digest == Digest(
        "d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f", 82)
    assert snapshots[2] == EMPTY_SNAPSHOT
Example No. 9
  def test_materialize_directories(self):
    # I tried passing in the digest of a file, but it didn't make it to the
    # rust code due to all of the checks we have in place (which is probably a good thing).
    self.prime_store_with_roland_digest()

    with temporary_dir() as temp_dir:
      dir_path = os.path.join(temp_dir, "containing_roland")
      digest = Digest(
        "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
        80
      )
      self.scheduler.materialize_directories((DirectoryToMaterialize(dir_path, digest),))

      created_file = os.path.join(dir_path, "roland")
      with open(created_file, 'r') as f:
        content = f.read()
        self.assertEqual(content, "European Burmese")
Example No. 10
    def test_passes_eager_fileset_with_spec_through(self):
        self.create_file('foo/foo/a.txt', 'a_contents')

        fileset = self.sources_for(['foo/a.txt'], 'foo')

        sf = SourcesField(sources=fileset)

        self.assertIs(fileset, sf.sources)
        self.assertEqual(['foo/a.txt'], list(sf.source_paths))
        self.assertEqual(['foo/foo/a.txt'], list(sf.relative_to_buildroot()))

        digest = '56001a7e48555f156420099a99da60a7a83acc90853046709341bf9f00a6f944'
        want_snapshot = Snapshot(Digest(digest, 77), ('foo/foo/a.txt', ), ())

        # We explicitly pass a None scheduler because we expect no scheduler lookups to be required
        # in order to get a Snapshot.
        self.assertEqual(sf.snapshot(scheduler=None), want_snapshot)
Example No. 11
    class Factory(Script):
        options_scope = "download-pex-bin"
        name = "pex"
        default_version = "v2.1.9"

        # Note: You can compute the digest and size using:
        # curl -L https://github.com/pantsbuild/pex/releases/download/vX.Y.Z/pex | tee >(wc -c) >(shasum -a 256) >/dev/null
        default_versions_and_digests = {
            PlatformConstraint.none: ToolForPlatform(
                digest=Digest(
                    "4e2677ce7270dd04d767e93e1904c90aa8c7f4f53b76f3615215970b45d100d7", 2624111
                ),
                version=ToolVersion("v2.1.9"),
            ),
        }

        def get_external_url_generator(self):
            return PexBinUrlGenerator()
Example No. 12
def test_docker_binary_build_image(docker_path: str,
                                   docker: DockerBinary) -> None:
    dockerfile = "src/test/repo/Dockerfile"
    digest = Digest(sha256().hexdigest(), 123)
    tags = (
        "test:0.1.0",
        "test:latest",
    )
    build_request = docker.build_image(tags, digest, dockerfile)

    assert build_request == Process(
        argv=(docker_path, "build", "-t", tags[0], "-t", tags[1], "-f",
              dockerfile, "."),
        input_digest=digest,
        cache_scope=ProcessCacheScope.PER_SESSION,
        description="",  # The description field is marked `compare=False`
    )
    assert build_request.description == "Building docker image test:0.1.0 +1 additional tag."
Example No. 13
def test_download_wrong_digest(self) -> None:
    with self.isolated_local_store():
        with http_server(StubHandler) as port:
            with self.assertRaises(ExecutionError) as cm:
                self.request_product(
                    Snapshot,
                    [
                        DownloadFile(
                            f"http://localhost:{port}/do_not_remove_or_edit.txt",
                            Digest(
                                self.pantsbuild_digest.fingerprint,
                                self.pantsbuild_digest.serialized_bytes_length + 1,
                            ),
                        )
                    ],
                )
            assert "wrong digest" in str(cm.exception).lower()
Example No. 14
class ClocBinary(Script):
    # Note: Not in scope 'cloc' because that's the name of the singleton task that runs cloc.
    options_scope = "cloc-binary"
    name = "cloc"
    default_version = "1.80"

    replaces_scope = "cloc"
    replaces_name = "version"

    default_versions_and_digests = {
        PlatformConstraint.none: ToolForPlatform(
            digest=Digest(
                "2b23012b1c3c53bd6b9dd43cd6aa75715eed4feb2cb6db56ac3fbbd2dffeac9d",
                546279,
            ),
            version=ToolVersion("1.80"),
        ),
    }
Example No. 15
    class Factory(Script):
        options_scope = "download-pex-bin"
        name = "pex"
        default_version = "v2.1.6"

        # Note: You can compute the digest and size using:
        # curl -L https://github.com/pantsbuild/pex/releases/download/vX.Y.Z/pex | tee >(wc -c) >(shasum -a 256) >/dev/null
        default_versions_and_digests = {
            PlatformConstraint.none: ToolForPlatform(
                digest=Digest(
                    "73e692f9a67a8d8b3f8b246076a3fd99f29fd5cbe18126c69657ac8c99c277fc",
                    2614280,
                ),
                version=ToolVersion("v2.1.6"),
            ),
        }

        def get_external_url_generator(self):
            return PexBinUrlGenerator()
Example No. 16
def test_streaming_output_changed() -> None:
    changed_digest = Digest(EMPTY_DIGEST.fingerprint, 2)
    changed_snapshot = Snapshot._unsafe_create(changed_digest, [], [])
    result = FmtResult(
        input=EMPTY_SNAPSHOT,
        output=changed_snapshot,
        stdout="stdout",
        stderr="stderr",
        formatter_name="formatter",
    )
    assert result.level() == LogLevel.WARN
    assert result.message() == dedent(
        """\
        formatter made changes.
        stdout
        stderr

        """
    )
Example No. 17
def test_download(self) -> None:
    with self.isolated_local_store():
        with http_server(StubHandler) as port:
            snapshot = self.request_product(
                Snapshot,
                [
                    DownloadFile(
                        f"http://localhost:{port}/do_not_remove_or_edit.txt",
                        self.pantsbuild_digest,
                    )
                ],
            )
            self.assert_snapshot_equals(
                snapshot,
                ["do_not_remove_or_edit.txt"],
                Digest("03bb499daabafc60212d2f4b2fab49b47b35b83a90c056224c768d52bce02691", 102),
            )
Example No. 18
    class Factory(Script):
        options_scope = "download-pex-bin"
        name = "pex"
        default_version = "v2.1.7"

        # Note: You can compute the digest and size using:
        # curl -L https://github.com/pantsbuild/pex/releases/download/vX.Y.Z/pex | tee >(wc -c) >(shasum -a 256) >/dev/null
        default_versions_and_digests = {
            PlatformConstraint.none: ToolForPlatform(
                digest=Digest(
                    "375ab4a405a6db57f3afd8d60eca666e61931b44f156dc78ac7d8e47bddc96d6",
                    2620451,
                ),
                version=ToolVersion("v2.1.7"),
            ),
        }

        def get_external_url_generator(self):
            return PexBinUrlGenerator()
Example No. 19
def test_empty(self) -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    address_mapper = AddressMapper(parser=JsonParser(TEST_TABLE), prelude_glob_patterns=())
    af = run_rule(
        parse_address_family,
        rule_args=[address_mapper, BuildFilePreludeSymbols(FrozenDict()), Dir("/dev/null")],
        mock_gets=[
            MockGet(
                product_type=Snapshot,
                subject_type=PathGlobs,
                mock=lambda _: Snapshot(Digest("abc", 10), ("/dev/null/BUILD",), ()),
            ),
            MockGet(
                product_type=FilesContent,
                subject_type=Digest,
                mock=lambda _: FilesContent([FileContent(path="/dev/null/BUILD", content=b"")]),
            ),
        ],
    )
    self.assertEqual(len(af.objects_by_name), 0)
Example No. 20
  def _capture_sources(self, vts):
    to_capture = []
    results_dirs = []
    filespecs = []

    for vt in vts:
      target = vt.target
      # Compute the (optional) subdirectory of the results_dir to generate code to. This
      # path will end up in the generated FilesetWithSpec and target, and thus needs to be
      # located below the stable/symlinked `vt.results_dir`.
      synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir)

      files = self.sources_globs

      results_dir_relpath = fast_relpath(synthetic_target_dir, get_buildroot())
      buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files)
      buildroot_relative_excludes = tuple(
        os.path.join(results_dir_relpath, file)
          for file in self.sources_exclude_globs
      )
      to_capture.append(
        PathGlobsAndRoot(
          PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
          text_type(get_buildroot()),
          # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`.
          Digest.load(vt.current_results_dir),
        )
      )
      results_dirs.append(results_dir_relpath)
      filespecs.append(FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

    snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture))

    for snapshot, vt in zip(snapshots, vts):
      snapshot.directory_digest.dump(vt.current_results_dir)

    return tuple(EagerFilesetWithSpec(
      results_dir_relpath,
      filespec,
      snapshot,
    ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots))
Example No. 21
def test_docker_binary_build_image(docker_path: str,
                                   docker: DockerBinary) -> None:
    dockerfile = "src/test/repo/Dockerfile"
    digest = Digest(sha256().hexdigest(), 123)
    tags = (
        "test:0.1.0",
        "test:latest",
    )
    env = {"DOCKER_HOST": "tcp://127.0.0.1:1234"}
    build_request = docker.build_image(
        tags=tags,
        digest=digest,
        dockerfile=dockerfile,
        build_args=DockerBuildArgs.from_strings("arg1=2"),
        context_root="build/context",
        env=env,
        extra_args=("--pull", "--squash"),
    )

    assert build_request == Process(
        argv=(
            docker_path,
            "build",
            "--pull",
            "--squash",
            "--tag",
            tags[0],
            "--tag",
            tags[1],
            "--build-arg",
            "arg1=2",
            "--file",
            dockerfile,
            "build/context",
        ),
        env=env,
        input_digest=digest,
        cache_scope=ProcessCacheScope.PER_SESSION,
        description="",  # The description field is marked `compare=False`
    )
    assert build_request.description == "Building docker image test:0.1.0 +1 additional tag."
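
A minimal sketch of a `build_image` that would satisfy the assertions above. This is inferred from the test, not the real `DockerBinary` implementation: the class name `DockerBinarySketch` is hypothetical, the flag ordering is an assumption drawn from the expected argv, and `Process`/`ProcessCacheScope` are presumed to be the same Pants types the test imports.

from dataclasses import dataclass

@dataclass(frozen=True)
class DockerBinarySketch:
    path: str  # absolute path to the `docker` client binary

    def build_image(self, tags, digest, dockerfile, build_args=(),
                    context_root=".", env=None, extra_args=()):
        # Assumes at least one tag; `build_args` is any iterable of KEY=VALUE strings.
        argv = [self.path, "build", *extra_args]
        for tag in tags:
            argv.extend(["--tag", tag])  # one --tag flag per requested image tag
        for arg in build_args:
            argv.extend(["--build-arg", arg])
        argv.extend(["--file", dockerfile, context_root])
        suffix = f" +{len(tags) - 1} additional tag." if len(tags) > 1 else "."
        return Process(
            argv=tuple(argv),
            env=env or {},
            input_digest=digest,  # the snapshotted build context
            cache_scope=ProcessCacheScope.PER_SESSION,  # `docker build` has side effects
            description=f"Building docker image {tags[0]}{suffix}",
        )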
Example No. 22
    def test_write_file(self):
        request = Process(
            argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
            description="echo roland",
            output_files=("roland",),
        )

        process_result = self.request_product(ProcessResult, [request])

        self.assertEqual(
            process_result.output_digest,
            Digest(
                fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
                serialized_bytes_length=80,
            ),
        )

        digest_contents = self.request_product(DigestContents, [process_result.output_digest])
        assert digest_contents == DigestContents(
            [FileContent("roland", b"European Burmese", False)]
        )
Example No. 23
def test_snapshot_equality() -> None:
    # Only the digest is used for equality.
    snapshot = Snapshot._unsafe_create(Digest("a" * 64, 1000),
                                       ["f.ext", "dir/f.ext"], ["dir"])
    assert snapshot == Snapshot._unsafe_create(Digest("a" * 64, 1000),
                                               ["f.ext", "dir/f.ext"], ["dir"])
    assert snapshot == Snapshot._unsafe_create(Digest("a" * 64, 1000),
                                               ["f.ext", "dir/f.ext"], ["foo"])
    assert snapshot == Snapshot._unsafe_create(Digest("a" * 64, 1000),
                                               ["f.ext"], ["dir"])
    assert snapshot != Snapshot._unsafe_create(Digest("a" * 64, 0),
                                               ["f.ext", "dir/f.ext"], ["dir"])
    assert snapshot != Snapshot._unsafe_create(Digest("b" * 64, 1000),
                                               ["f.ext", "dir/f.ext"], ["dir"])
    with pytest.raises(TypeError):
        snapshot < snapshot  # type: ignore[operator]
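
The comment above is the whole contract: only the digest participates in equality, and the file and directory listings are ignored. A toy illustration of that contract (a hypothetical class, not the real `Snapshot`, which is backed by the Rust engine):

class DigestOnlySnapshot:
    def __init__(self, digest, files, dirs):
        self.digest, self.files, self.dirs = digest, tuple(files), tuple(dirs)

    def __eq__(self, other):
        # files/dirs are deliberately ignored; equal digests mean equal snapshots.
        return isinstance(other, DigestOnlySnapshot) and self.digest == other.digest

    # No __lt__ is defined, so ordering comparisons raise TypeError, matching the
    # pytest.raises(TypeError) assertion in the test above.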
Example No. 24
    def test_caches_downloads(self) -> None:
        with self.isolated_local_store():
            with http_server(StubHandler) as port:
                self.prime_store_with_roland_digest()

                # This would error if we hit the HTTP server, because it returns a 404,
                # but we won't hit the server because the download is already cached,
                # so we shouldn't see an error.
                url = DownloadFile(
                    f"http://localhost:{port}/roland",
                    FileDigest(
                        "693d8db7b05e99c6b7a7c0616456039d89c555029026936248085193559a0b5d",
                        16),
                )
                snapshot = self.request(Snapshot, [url])
                self.assert_snapshot_equals(
                    snapshot,
                    ["roland"],
                    Digest(
                        "9341f76bef74170bedffe51e4f2e233f61786b7752d21c2339f8ee6070eba819",
                        82),
                )
Example No. 25
def test_empty(self):
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    address_mapper = AddressMapper(JsonParser(TEST_TABLE))
    af = run_rule(
        parse_address_family,
        rule_args=[address_mapper, Dir('/dev/null')],
        mock_gets=[
            MockGet(
                product_type=Snapshot,
                subject_type=PathGlobs,
                mock=lambda _: Snapshot(Digest('abc', 10), ('/dev/null/BUILD',), ()),
            ),
            MockGet(
                product_type=FilesContent,
                subject_type=Digest,
                mock=lambda _: FilesContent([FileContent(path='/dev/null/BUILD', content=b'')]),
            ),
        ],
    )
    self.assertEqual(len(af.objects_by_name), 0)
Example No. 26
    def test_good_prelude(self) -> None:
        address_mapper = unittest.mock.Mock()
        address_mapper.prelude_glob_patterns = ("prelude",)

        symbols = run_rule(
            evaluate_preludes,
            rule_args=[address_mapper,],
            mock_gets=[
                MockGet(
                    product_type=Snapshot,
                    subject_type=PathGlobs,
                    mock=lambda _: Snapshot(Digest("abc", 10), ("/dev/null/prelude",), ()),
                ),
                MockGet(
                    product_type=FilesContent,
                    subject_type=Digest,
                    mock=lambda _: FilesContent(
                        [FileContent(path="/dev/null/prelude", content=b"def foo(): return 1")]
                    ),
                ),
            ],
        )
        assert symbols.symbols["foo"]() == 1
Example No. 27
    def _capture_resources(self, vts):
        """Given a list of VersionedTargets, capture DirectoryDigests for all of them.

        :returns: A list of tuples of VersionedTargets and digests for their content.
        """
        # Capture Snapshots for each directory, using an optional adjacent digest. Create the digest
        # afterward if it does not exist.
        buildroot = get_buildroot()
        snapshots = self.context._scheduler.capture_snapshots(
            tuple(
                PathGlobsAndRoot(
                    PathGlobs([os.path.join(fast_relpath(vt.results_dir, buildroot), "**")]),
                    buildroot,
                    Digest.load(vt.current_results_dir),
                ) for vt in vts))
        result = []
        for vt, snapshot in zip(vts, snapshots):
            snapshot.directory_digest.dump(vt.current_results_dir)
            result.append((vt, snapshot.directory_digest))
        return result
Example No. 28
    def test_illegal_import(self) -> None:
        prelude = dedent("""\
            import os
            def make_target():
                python_library()
            """).encode()

        address_mapper = unittest.mock.Mock()
        address_mapper.prelude_glob_patterns = ("prelude", )
        address_mapper.build_file_imports_behavior = BuildFileImportsBehavior.error

        with self.assertRaisesRegex(
                Exception,
                "Import used in /dev/null/prelude at line 1\\. Import statements are banned"
        ):
            run_rule(
                evaluate_preludes,
                rule_args=[
                    address_mapper,
                ],
                mock_gets=[
                    MockGet(
                        product_type=Snapshot,
                        subject_type=PathGlobs,
                        mock=lambda _: Snapshot(Digest("abc", 10),
                                                ("/dev/null/prelude", ), ()),
                    ),
                    MockGet(
                        product_type=FilesContent,
                        subject_type=Digest,
                        mock=lambda _: FilesContent([
                            FileContent(path="/dev/null/prelude",
                                        content=prelude)
                        ]),
                    ),
                ],
            )
Example No. 29
def run_python_test(target):
  # TODO: Inject versions and digests here through some option, rather than hard-coding it.
  pex_snapshot = yield Get(Snapshot, UrlToFetch("https://github.com/pantsbuild/pex/releases/download/v1.5.2/pex27",
                                                Digest('8053a79a5e9c2e6e9ace3999666c9df910d6289555853210c1bbbfa799c3ecda', 1757011)))

  # TODO: This should be configurable, both with interpreter constraints, and for remote execution.
  python_binary = sys.executable

  argv = [
    './{}'.format(pex_snapshot.files[0].path),
    '-e', 'pytest:main',
    '--python', python_binary,
    # TODO: This is non-hermetic because pytest will be resolved on the fly by pex27, where it should be hermetically provided in some way.
    # We should probably also specify a specific version.
    'pytest',
  ]

  merged_input_files = yield Get(
    Digest,
    MergedDirectories,
    MergedDirectories(directories=(target.adaptor.sources.snapshot.directory_digest, pex_snapshot.directory_digest)),
  )

  request = ExecuteProcessRequest(
    argv=tuple(argv),
    input_files=merged_input_files,
    description='Run pytest for {}'.format(target.address.reference()),
    # TODO: This should not be necessary
    env={'PATH': os.path.dirname(python_binary)}
  )

  result = yield Get(FallibleExecuteProcessResult, ExecuteProcessRequest, request)
  # TODO: Do something with stderr?
  status = Status.SUCCESS if result.exit_code == 0 else Status.FAILURE

  yield PyTestResult(status=status, stdout=str(result.stdout))
Example No. 30
def to_classpath_entries(paths, scheduler):
    # Nested helper: `self` and `pathglob_for` below are captured from the scope of
    # the enclosing method; this is not a standalone module-level function.
    # list of path ->
    # list of (path, optional<digest>) ->
    path_and_digests = [(p, Digest.load(os.path.dirname(p))) for p in paths]
    # partition: list of path, list of tuples
    paths_without_digests = [p for (p, d) in path_and_digests if not d]
    if paths_without_digests:
        self.context.log.debug(
            'Expected to find digests for {}, capturing them.'.format(paths_without_digests))
    paths_with_digests = [(p, d) for (p, d) in path_and_digests if d]
    # list of path -> list path, captured snapshot -> list of path with digest
    snapshots = scheduler.capture_snapshots(
        tuple(pathglob_for(p) for p in paths_without_digests))
    captured_paths_and_digests = [
        (p, s.directory_digest) for (p, s) in zip(paths_without_digests, snapshots)
    ]
    # merge and classpath-ify
    return [
        ClasspathEntry(p, d)
        for (p, d) in paths_with_digests + captured_paths_and_digests
    ]
Example No. 31
def _snapshot(self) -> Snapshot:
    return Snapshot(Digest("xx", 2), ("root/BUILD",), ())
Example No. 32
async def setup_shunit2_for_target(
    request: TestSetupRequest,
    bash_program: BashProgram,
    bash_setup: BashSetup,
    test_subsystem: TestSubsystem,
) -> TestSetup:
    # Because shunit2 is a simple Bash file, we download it using `DownloadFile`. Normally, we
    # would install the test runner through `ExternalTool`. See
    # https://www.pantsbuild.org/v2.0/docs/rules-api-installing-tools and
    # https://www.pantsbuild.org/v2.0/docs/rules-api-file-system.
    shunit2_script_request = Get(
        Digest,
        DownloadFile(
            url="https://raw.githubusercontent.com/kward/shunit2/b9102bb763cc603b3115ed30a5648bf950548097/shunit2",
            expected_digest=Digest(
                "1f11477b7948150d1ca50cdd41d89be4ed2acd137e26d2e0fe23966d0e272cc5",
                40987,
            ),
        ),
    )

    transitive_targets_request = Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))

    shunit2_script, transitive_targets = await MultiGet(
        shunit2_script_request, transitive_targets_request)

    # We need to include all relevant transitive dependencies in the environment. We also get the
    # test's sources so that we can check that it has `source ./shunit2` at the bottom of it.
    #
    # Because we might modify the test files, we leave the tests out of
    # `dependencies_source_files_request` by using `transitive_targets.dependencies` instead of
    # `transitive_targets.closure`. This makes sure that we don't accidentally include the
    # unmodified test files and the modified test files in the same input. See
    # https://www.pantsbuild.org/v2.0/docs/rules-api-and-target-api.
    dependencies_source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.dependencies),
            for_sources_types=(BashSources, FilesSources, ResourcesSources),
        ),
    )
    test_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))
    dependencies_source_files, test_source_files = await MultiGet(
        dependencies_source_files_request, test_source_files_request)

    # To check if the test files already have `source ./shunit2` in them, we need to look at the
    # actual file content. We use `DigestContents` for this, and then use `CreateDigest` to create
    # a digest of the (possibly) updated test files. See
    # https://www.pantsbuild.org/v2.0/docs/rules-api-file-system.
    #
    # Most test runners don't modify their test files like we do here, so most test runners can
    # skip this step.
    test_files_content = await Get(DigestContents, Digest,
                                   test_source_files.snapshot.digest)
    updated_test_files_content = []
    for file_content in test_files_content:
        if (b"source ./shunit2" in file_content.content
                or b". ./shunit2" in file_content.content):
            updated_test_files_content.append(file_content)
        else:
            updated_file_content = FileContent(
                path=file_content.path,
                content=file_content.content + b"\nsource ./shunit2\n",
            )
            updated_test_files_content.append(updated_file_content)
    updated_test_source_files = await Get(
        Digest, CreateDigest(updated_test_files_content))

    # The Process needs one single `Digest`, so we merge everything together. See
    # https://www.pantsbuild.org/v2.0/docs/rules-api-file-system.
    input_digest = await Get(
        Digest,
        MergeDigests([
            shunit2_script,
            updated_test_source_files,
            dependencies_source_files.snapshot.digest,
        ]),
    )

    # We must check if `test --force` was used, and if so, use a hack to invalidate the cache by
    # mixing in a randomly generated UUID into the environment.
    extra_env = {}
    if test_subsystem.force and not request.is_debug:
        uuid = await Get(UUID, UUIDRequest())
        extra_env["__PANTS_FORCE_TEST_RUN__"] = str(uuid)

    process = Process(
        argv=[bash_program.exe, *test_source_files.snapshot.files],
        input_digest=input_digest,
        description=f"Run shunit2 on {request.field_set.address}.",
        level=LogLevel.DEBUG,
        env=bash_setup.env_dict,
        timeout_seconds=request.field_set.timeout.value,
    )
    return TestSetup(process)
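
The rewrite step in the middle of this rule (appending `source ./shunit2` when a test file does not already source the runner) is pure byte manipulation and easy to check in isolation. A minimal sketch with a hypothetical helper name:

def ensure_sources_shunit2(content: bytes) -> bytes:
    """Append `source ./shunit2` unless the test file already sources it."""
    if b"source ./shunit2" in content or b". ./shunit2" in content:
        return content
    return content + b"\nsource ./shunit2\n"

updated = ensure_sources_shunit2(b"testAddition() { assertEquals 2 2; }")
assert updated.endswith(b"\nsource ./shunit2\n")
assert ensure_sources_shunit2(b"source ./shunit2\n") == b"source ./shunit2\n"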