Example #1
    def test_merge_directories(self):
        with temporary_dir() as temp_dir:
            with open(os.path.join(temp_dir, "roland"), "w") as f:
                f.write("European Burmese")
            with open(os.path.join(temp_dir, "susannah"), "w") as f:
                f.write("Not sure actually")
            scheduler = self.mk_scheduler(rules=create_fs_rules())
            (empty_snapshot, roland_snapshot, susannah_snapshot,
             both_snapshot) = (scheduler.capture_snapshots((
                 PathGlobsAndRoot(PathGlobs(("doesnotmatch", ), ()), temp_dir),
                 PathGlobsAndRoot(PathGlobs(("roland", ), ()), temp_dir),
                 PathGlobsAndRoot(PathGlobs(("susannah", ), ()), temp_dir),
                 PathGlobsAndRoot(PathGlobs(("*", ), ()), temp_dir),
             )))

            empty_merged = scheduler.merge_directories(
                (empty_snapshot.directory_digest,))
            self.assertEqual(
                empty_snapshot.directory_digest,
                empty_merged,
            )

            roland_merged = scheduler.merge_directories((
                roland_snapshot.directory_digest,
                empty_snapshot.directory_digest,
            ))
            self.assertEqual(
                roland_snapshot.directory_digest,
                roland_merged,
            )

            both_merged = scheduler.merge_directories((
                roland_snapshot.directory_digest,
                susannah_snapshot.directory_digest,
            ))

            self.assertEqual(both_snapshot.directory_digest, both_merged)
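All of these merge examples follow one pattern: build a PathGlobsAndRoot per directory, capture the batch with scheduler.capture_snapshots, then combine the resulting digests with merge_directories. A minimal sketch of that round trip, assuming the same scheduler API as Example #1 (the file name "hello" is illustrative):

    # Capture a scratch directory, then merge the digest with itself:
    # merging identical or empty trees is a no-op.
    with temporary_dir() as temp_dir:
        with open(os.path.join(temp_dir, "hello"), "w") as f:
            f.write("world")
        snapshot, = scheduler.capture_snapshots(
            (PathGlobsAndRoot(PathGlobs(["*"]), temp_dir),))
        merged = scheduler.merge_directories(
            (snapshot.directory_digest, snapshot.directory_digest))
        assert merged == snapshot.directory_digest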
Example #2
    def test_synchronously_merge_directories(self):
        with temporary_dir() as temp_dir:
            with open(os.path.join(temp_dir, "roland"), "w") as f:
                f.write("European Burmese")
            with open(os.path.join(temp_dir, "susannah"), "w") as f:
                f.write("Not sure actually")
            (
                empty_snapshot,
                roland_snapshot,
                susannah_snapshot,
                both_snapshot,
            ) = self.scheduler.capture_snapshots(
                (
                    PathGlobsAndRoot(PathGlobs(["doesnotmatch"]), temp_dir),
                    PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
                    PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
                    PathGlobsAndRoot(PathGlobs(["*"]), temp_dir),
                )
            )

            empty_merged = self.scheduler.merge_directories((empty_snapshot.directory_digest,))
            self.assertEqual(
                empty_snapshot.directory_digest, empty_merged,
            )

            roland_merged = self.scheduler.merge_directories(
                (roland_snapshot.directory_digest, empty_snapshot.directory_digest,)
            )
            self.assertEqual(
                roland_snapshot.directory_digest, roland_merged,
            )

            both_merged = self.scheduler.merge_directories(
                (roland_snapshot.directory_digest, susannah_snapshot.directory_digest,)
            )

            self.assertEqual(both_snapshot.directory_digest, both_merged)
Example #3
    def test_asynchronously_merge_directories(self):
        with temporary_dir() as temp_dir:
            with open(os.path.join(temp_dir, "roland"), "w") as f:
                f.write("European Burmese")
            with open(os.path.join(temp_dir, "susannah"), "w") as f:
                f.write("Not sure actually")
            (empty_snapshot, roland_snapshot, susannah_snapshot,
             both_snapshot) = (self.scheduler.capture_snapshots((
                 PathGlobsAndRoot(PathGlobs(("doesnotmatch", ), ()), temp_dir),
                 PathGlobsAndRoot(PathGlobs(("roland", ), ()), temp_dir),
                 PathGlobsAndRoot(PathGlobs(("susannah", ), ()), temp_dir),
                 PathGlobsAndRoot(PathGlobs(("*", ), ()), temp_dir),
             )))

            empty_merged = self.request_single_product(
                Digest,
                DirectoriesToMerge((empty_snapshot.directory_digest, )),
            )
            self.assertEqual(empty_snapshot.directory_digest, empty_merged)

            roland_merged = self.request_single_product(
                Digest,
                DirectoriesToMerge((roland_snapshot.directory_digest,
                                    empty_snapshot.directory_digest)),
            )
            self.assertEqual(
                roland_snapshot.directory_digest,
                roland_merged,
            )

            both_merged = self.request_single_product(
                Digest,
                DirectoriesToMerge((roland_snapshot.directory_digest,
                                    susannah_snapshot.directory_digest)),
            )

            self.assertEqual(both_snapshot.directory_digest, both_merged)
Example #4
    def test_asynchronously_merge_digests(self) -> None:
        with temporary_dir() as temp_dir:
            Path(temp_dir, "roland").write_text("European Burmese")
            Path(temp_dir, "susannah").write_text("Not sure actually")
            (
                empty_snapshot,
                roland_snapshot,
                susannah_snapshot,
                both_snapshot,
            ) = self.scheduler.capture_snapshots((
                PathGlobsAndRoot(PathGlobs(["doesnotmatch"]), temp_dir),
                PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
                PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
                PathGlobsAndRoot(PathGlobs(["*"]), temp_dir),
            ))

            empty_merged = self.request_product(
                Digest, [MergeDigests((empty_snapshot.digest, ))])
            assert empty_snapshot.digest == empty_merged

            roland_merged = self.request_product(
                Digest,
                [
                    MergeDigests(
                        (roland_snapshot.digest, empty_snapshot.digest))
                ],
            )
            assert roland_snapshot.digest == roland_merged

            both_merged = self.request_product(
                Digest,
                [
                    MergeDigests(
                        (roland_snapshot.digest, susannah_snapshot.digest))
                ],
            )
            assert both_snapshot.digest == both_merged
Example #5
 def test_multiple_snapshots_from_outside_buildroot(self) -> None:
     with temporary_dir() as temp_dir:
         Path(temp_dir, "roland").write_text("European Burmese")
         Path(temp_dir, "susannah").write_text("I don't know")
         scheduler = self.mk_scheduler(rules=fs_rules())
         snapshots = scheduler.capture_snapshots(
             (
                 PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
                 PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
                 PathGlobsAndRoot(PathGlobs(["doesnotexist"]), temp_dir),
             )
         )
         assert 3 == len(snapshots)
         self.assert_snapshot_equals(
             snapshots[0],
             ["roland"],
             Digest("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16", 80),
         )
         self.assert_snapshot_equals(
             snapshots[1],
             ["susannah"],
             Digest("d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f", 82),
         )
         self.assert_snapshot_equals(snapshots[2], [], EMPTY_DIGEST)
Example #6
 def test_snapshot_from_outside_buildroot(self):
     with temporary_dir() as temp_dir:
         with open(os.path.join(temp_dir, "roland"), "w") as f:
             f.write("European Burmese")
         scheduler = self.mk_scheduler(rules=create_fs_rules())
         globs = PathGlobs(["*"])
         snapshot = scheduler.capture_snapshots(
             (PathGlobsAndRoot(globs, temp_dir), ))[0]
         self.assert_snapshot_equals(
             snapshot,
             ["roland"],
             Digest(
                 "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
                 80),
         )
Example #7
 def prime_store_with_roland_digest(self):
     """This method primes the store with a directory of a file named 'roland' and contents 'European Burmese'."""
     with temporary_dir() as temp_dir:
         with open(os.path.join(temp_dir, "roland"), "w") as f:
             f.write("European Burmese")
         scheduler = self.mk_scheduler(rules=create_fs_rules())
         globs = PathGlobs(("*", ), ())
         snapshot = scheduler.capture_snapshots(
             (PathGlobsAndRoot(globs, text_type(temp_dir)), ))[0]
         self.assert_snapshot_equals(
             snapshot, ["roland"],
             DirectoryDigest(
                 text_type(
                     "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16"
                 ), 80))
Example #8
    def prime_store_with_roland_digest(self):
        """This method primes the store with a directory of a file named 'roland' and contents
        'European Burmese'."""
        with temporary_dir() as temp_dir:
            with open(os.path.join(temp_dir, "roland"), "w") as f:
                f.write("European Burmese")
            globs = PathGlobs(["*"])
            snapshot = self.scheduler.capture_snapshots(
                (PathGlobsAndRoot(globs, temp_dir), ))[0]

            expected_digest = Digest(
                "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
                80)
            self.assert_snapshot_equals(snapshot, ["roland"], expected_digest)
        return expected_digest
Example #9
  def _set_directory_digests_for_valid_target_classpath_directories(self, valid_targets, compile_contexts):
    def _get_relative_classpath_for_target(target):
      cc = self.select_runtime_context(compile_contexts[target])
      if self.get_options().use_classpath_jars:
        return fast_relpath(cc.jar_file.path, get_buildroot())
      else:
        return fast_relpath(cc.classes_dir.path, get_buildroot()) + '/**'

    snapshots = self.context._scheduler.capture_snapshots(
      tuple(PathGlobsAndRoot(PathGlobs(
        [_get_relative_classpath_for_target(target)]
      ), get_buildroot()) for target in valid_targets))
    for target, snapshot in list(zip(valid_targets, snapshots)):
      cc = self.select_runtime_context(compile_contexts[target])
      self._set_directory_digest_for_compile_context(cc, snapshot.directory_digest)
Example #10
    def register_extra_products_from_contexts(self, targets, compile_contexts):
        super().register_extra_products_from_contexts(targets, compile_contexts)

        def confify(entries):
            return [(conf, e) for e in entries for conf in self._confs]

        # Ensure that the jar/rsc jar is on the rsc_mixed_compile_classpath.
        for target in targets:
            merged_cc = compile_contexts[target]
            zinc_cc = merged_cc.zinc_cc
            rsc_cc = merged_cc.rsc_cc
            # Make sure m.jar is digested if it exists when the target is validated.
            if rsc_cc.rsc_jar_file.directory_digest is None and os.path.exists(
                rsc_cc.rsc_jar_file.path
            ):
                relpath = fast_relpath(rsc_cc.rsc_jar_file.path, get_buildroot())
                (classes_dir_snapshot,) = self.context._scheduler.capture_snapshots(
                    [
                        PathGlobsAndRoot(
                            PathGlobs([relpath]), get_buildroot(), Digest.load(relpath),
                        ),
                    ]
                )
                rsc_cc.rsc_jar_file.hydrate_missing_directory_digest(
                    classes_dir_snapshot.directory_digest
                )

            if rsc_cc.workflow is not None:
                cp_entries = match(
                    rsc_cc.workflow,
                    {
                        self.JvmCompileWorkflowType.zinc_only: lambda: confify(
                            [self._classpath_for_context(zinc_cc)]
                        ),
                        self.JvmCompileWorkflowType.zinc_java: lambda: confify(
                            [self._classpath_for_context(zinc_cc)]
                        ),
                        self.JvmCompileWorkflowType.rsc_and_zinc: lambda: confify(
                            [rsc_cc.rsc_jar_file]
                        ),
                        self.JvmCompileWorkflowType.outline_and_zinc: lambda: confify(
                            [rsc_cc.rsc_jar_file]
                        ),
                    },
                )()
                self.context.products.get_data("rsc_mixed_compile_classpath").add_for_target(
                    target, cp_entries
                )
Example #11
    def add_directory_digests_for_jars(self, targets_and_jars):
        """For each target, get DirectoryDigests for its jars and return them zipped with the jars.

        :param targets_and_jars: List of tuples of the form (Target, [pants.java.jar.jar_dependency_utils.ResolveJar])
        :return: list[tuple[Target, list[pants.java.jar.jar_dependency_utils.ResolveJar]]]
        """

        targets_and_jars = list(targets_and_jars)

        if not targets_and_jars:
            return targets_and_jars

        jar_paths = []
        for target, jars_to_snapshot in targets_and_jars:
            for jar in jars_to_snapshot:
                jar_paths.append(fast_relpath(jar.pants_path, get_buildroot()))

        # Capture Snapshots for jars, using an optional adjacent digest. Create the digest afterward
        # if it does not exist.
        snapshots = self.context._scheduler.capture_snapshots(
            tuple(
                PathGlobsAndRoot(
                    PathGlobs([jar]),
                    get_buildroot(),
                    Digest.load(jar),
                ) for jar in jar_paths))
        for snapshot, jar_path in zip(snapshots, jar_paths):
            snapshot.digest.dump(jar_path)

        # We want to map back the list[Snapshot] to targets_and_jars
        # We assume that (1) jars_to_snapshot has the same number of ResolveJars as snapshots does Snapshots,
        # and that (2) capture_snapshots preserves ordering.
        digests = [snapshot.digest for snapshot in snapshots]
        digest_iterator = iter(digests)

        snapshotted_targets_and_jars = []
        for target, jars_to_snapshot in targets_and_jars:
            snapshotted_jars = [
                ResolvedJar(
                    coordinate=jar.coordinate,
                    cache_path=jar.cache_path,
                    pants_path=jar.pants_path,
                    directory_digest=next(digest_iterator),
                ) for jar in jars_to_snapshot
            ]
            snapshotted_targets_and_jars.append((target, snapshotted_jars))

        return snapshotted_targets_and_jars
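The Digest.load / dump pair above is a small on-disk cache: load returns a digest previously dumped adjacent to the path (letting capture_snapshots skip re-hashing unchanged files), and dump persists the freshly captured digest. A sketch of that round trip under those assumptions, with relpath standing in for any buildroot-relative jar path:

    # Reuse an adjacent digest when present; re-hash only on a miss,
    # then persist the digest for the next run.
    cached_digest = Digest.load(relpath)  # assumed to return None on a miss
    snapshot, = scheduler.capture_snapshots(
        (PathGlobsAndRoot(PathGlobs([relpath]), get_buildroot(), cached_digest),))
    snapshot.digest.dump(relpath)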
Example #12
 def _double_check_cache_for_vts(self, vts, zinc_compile_context):
     # Double check the cache before beginning compilation
     if self.check_cache(vts):
         self.context.log.debug(
             f"Snapshotting results for {vts.target.address.spec}")
         classpath_entry = self._classpath_for_context(zinc_compile_context)
         relpath = fast_relpath(classpath_entry.path, get_buildroot())
         (classes_dir_snapshot,
          ) = self.context._scheduler.capture_snapshots([
              PathGlobsAndRoot(PathGlobs([relpath]), get_buildroot(),
                               Digest.load(relpath))
          ])
         classpath_entry.hydrate_missing_directory_digest(
             classes_dir_snapshot.digest)
         # Re-validate the vts!
         vts.update()
Example #13
    def _compile_compiler_bridge(self, context):
        """Compile the compiler bridge to be used by zinc, using our scala bootstrapper. It will
        compile and cache the jar, and materialize it if not already there.

        :param context: The context of the task trying to compile the bridge.
                        This is mostly needed to use its scheduler to create digests of the relevant jars.
        :return: The absolute path to the compiled scala-compiler-bridge jar.
        """
        bridge_jar_name = "scala-compiler-bridge.jar"
        bridge_jar = os.path.join(self._compiler_bridge_cache_dir, bridge_jar_name)
        global_bridge_cache_dir = os.path.join(
            self._zinc_factory.get_options().pants_bootstrapdir,
            fast_relpath(self._compiler_bridge_cache_dir, self._workdir()),
        )
        globally_cached_bridge_jar = os.path.join(global_bridge_cache_dir, bridge_jar_name)

        # Workaround to avoid recompiling the bridge for every integration test
        # We check the bootstrapdir (.cache) for the bridge.
        # If it exists, we make a copy to the buildroot.
        #
        # TODO Remove when action caches are implemented.
        if os.path.exists(globally_cached_bridge_jar):
            # Cache the bridge jar under buildroot, to allow snapshotting
            safe_mkdir(self._relative_to_buildroot(self._compiler_bridge_cache_dir))
            safe_hardlink_or_copy(globally_cached_bridge_jar, bridge_jar)

        if not os.path.exists(bridge_jar):
            res = self._run_bootstrapper(bridge_jar, context)
            context._scheduler.materialize_directory(
                DirectoryToMaterialize(res.output_directory_digest)
            )
            # For the workaround above to work, we need to store a copy of the bridge in
            # the bootstrapdir cache (.cache).
            safe_mkdir(global_bridge_cache_dir)
            safe_hardlink_or_copy(bridge_jar, globally_cached_bridge_jar)

            return ClasspathEntry(bridge_jar, res.output_directory_digest)
        else:
            bridge_jar_snapshot = context._scheduler.capture_snapshots(
                (
                    PathGlobsAndRoot(
                        PathGlobs((self._relative_to_buildroot(bridge_jar),)), get_buildroot()
                    ),
                )
            )[0]
            bridge_jar_digest = bridge_jar_snapshot.directory_digest
            return ClasspathEntry(bridge_jar, bridge_jar_digest)
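The two branches above are mirror images: materialize_directory writes a digest's contents out into the workspace, while capture_snapshots derives a digest from files already on disk. A minimal sketch of the materialize side, assuming a process result res carrying an output_directory_digest as in the example:

    # Write the process output into the buildroot, then pass the digest on
    # as part of a classpath entry.
    context._scheduler.materialize_directory(
        DirectoryToMaterialize(res.output_directory_digest))
    entry = ClasspathEntry(bridge_jar, res.output_directory_digest)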
Example #14
    def _capture_sources(self, vts):
        to_capture = []
        results_dirs = []
        filespecs = []

        for vt in vts:
            target = vt.target
            # Compute the (optional) subdirectory of the results_dir to generate code to. This
            # path will end up in the generated FilesetWithSpec and target, and thus needs to be
            # located below the stable/symlinked `vt.results_dir`.
            synthetic_target_dir = self.synthetic_target_dir(
                target, vt.results_dir)

            files = self.sources_globs

            results_dir_relpath = fast_relpath(synthetic_target_dir,
                                               get_buildroot())
            buildroot_relative_globs = tuple(
                os.path.join(results_dir_relpath, file) for file in files)
            buildroot_relative_excludes = tuple(
                os.path.join(results_dir_relpath, file)
                for file in self.sources_exclude_globs)
            to_capture.append(
                PathGlobsAndRoot(
                    PathGlobs(buildroot_relative_globs,
                              buildroot_relative_excludes),
                    text_type(get_buildroot()),
                    # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`.
                    Digest.load(vt.current_results_dir),
                ))
            results_dirs.append(results_dir_relpath)
            filespecs.append(
                FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

        snapshots = self.context._scheduler.capture_snapshots(
            tuple(to_capture))

        for snapshot, vt in zip(snapshots, vts):
            snapshot.directory_digest.dump(vt.current_results_dir)

        return tuple(
            EagerFilesetWithSpec(
                results_dir_relpath,
                filespec,
                snapshot,
            ) for (results_dir_relpath, filespec,
                   snapshot) in zip(results_dirs, filespecs, snapshots))
Example #15
    def process_remote_sources(self):
        """Create synthetic targets with populated sources from remote_sources targets."""
        unpacked_sources = self.context.products.get_data(UnpackedArchives)
        remote_sources_targets = self.context.targets(
            predicate=lambda t: isinstance(t, RemoteSources))
        if not remote_sources_targets:
            return

        snapshot_specs = []
        filespecs = []
        unpack_dirs = []
        for target in remote_sources_targets:
            unpacked_archive = unpacked_sources[target.sources_target]
            sources = unpacked_archive.found_files
            rel_unpack_dir = unpacked_archive.rel_unpack_dir
            self.context.log.debug(
                'target: {}, rel_unpack_dir: {}, sources: {}'.format(
                    target, rel_unpack_dir, sources))
            sources_in_dir = tuple(
                os.path.join(rel_unpack_dir, source) for source in sources)
            snapshot_specs.append(
                PathGlobsAndRoot(
                    PathGlobs(sources_in_dir),
                    get_buildroot(),
                ))
            filespecs.append({'globs': sources_in_dir})
            unpack_dirs.append(rel_unpack_dir)

        snapshots = self.context._scheduler.capture_snapshots(
            tuple(snapshot_specs))
        for target, snapshot, filespec, rel_unpack_dir in zip(
                remote_sources_targets, snapshots, filespecs, unpack_dirs):
            synthetic_target = self.context.add_new_target(
                address=Address(os.path.relpath(self.workdir, get_buildroot()),
                                target.id),
                target_type=target.destination_target_type,
                dependencies=target.dependencies,
                sources=EagerFilesetWithSpec(rel_unpack_dir, filespec,
                                             snapshot),
                derived_from=target,
                **target.destination_target_args)
            self.context.log.debug(
                'synthetic_target: {}'.format(synthetic_target))
            for dependent in self.context.build_graph.dependents_of(
                    target.address):
                self.context.build_graph.inject_dependency(
                    dependent, synthetic_target.address)
Example #16
        def _snapshot_resources(resources, prefix='.'):
            with temporary_dir() as root_dir:
                for filename, filecontent in resources.items():
                    safe_file_dump(
                        os.path.join(os.path.join(root_dir, prefix), filename),
                        filecontent)

                extra_resources_relative_to_rootdir = {
                    os.path.join(prefix, k): v
                    for k, v in resources.items()
                }
                snapshot, = self.context._scheduler.capture_snapshots([
                    PathGlobsAndRoot(
                        PathGlobs(extra_resources_relative_to_rootdir),
                        root_dir)
                ])

            return snapshot.directory_digest
Example #17
    def add_directory_digests_for_jars(self, jars):
        """Get DirectoryDigests for jars and return them zipped with the jars.

    :param jars: List of pants.java.jar.jar_dependency_utils.ResolveJar
    :return: List of ResolveJars.
    """
        snapshots = self.context._scheduler.capture_snapshots(
            tuple(
                PathGlobsAndRoot(
                    PathGlobs([fast_relpath(jar.pants_path, get_buildroot())]),
                    get_buildroot()) for jar in jars))
        return [
            ResolvedJar(coordinate=jar.coordinate,
                        cache_path=jar.cache_path,
                        pants_path=jar.pants_path,
                        directory_digest=directory_digest)
            for jar, directory_digest in list(
                zip(jars,
                    [snapshot.directory_digest for snapshot in snapshots]))
        ]
Example #18
    def _capture_sources(self, targets_and_dirs):
        to_capture = []
        results_dirs = []
        filespecs = []

        for target, synthetic_target_dir in targets_and_dirs:
            if self.sources_globs is None:
                files = list(self.find_sources(target, synthetic_target_dir))
            else:
                files = self.sources_globs

            results_dir_relpath = os.path.relpath(synthetic_target_dir,
                                                  get_buildroot())
            buildroot_relative_globs = tuple(
                os.path.join(results_dir_relpath, file) for file in files)
            buildroot_relative_excludes = tuple(
                os.path.join(results_dir_relpath, file)
                for file in self.sources_exclude_globs)
            to_capture.append(
                PathGlobsAndRoot(
                    PathGlobs(buildroot_relative_globs,
                              buildroot_relative_excludes),
                    str(get_buildroot()),
                ))
            results_dirs.append(results_dir_relpath)
            filespecs.append(
                FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

        snapshots = self.context._scheduler.capture_snapshots(
            tuple(to_capture))

        return tuple(
            EagerFilesetWithSpec(
                results_dir_relpath,
                filespec,
                snapshot,
            ) for (results_dir_relpath, filespec,
                   snapshot) in zip(results_dirs, filespecs, snapshots))
Example #19
    def _capture_resources(self, vts):
        """Given a list of VersionedTargets, capture DirectoryDigests for all of them.

        :returns: A list of tuples of VersionedTargets and digests for their content.
        """
        # Capture Snapshots for each directory, using an optional adjacent digest. Create the digest
        # afterward if it does not exist.
        buildroot = get_buildroot()
        snapshots = self.context._scheduler.capture_snapshots(
            tuple(
                PathGlobsAndRoot(
                    PathGlobs([
                        os.path.join(fast_relpath(vt.results_dir, buildroot),
                                     "**")
                    ]),
                    buildroot,
                    Digest.load(vt.current_results_dir),
                ) for vt in vts))
        result = []
        for vt, snapshot in zip(vts, snapshots):
            snapshot.directory_digest.dump(vt.current_results_dir)
            result.append((vt, snapshot.directory_digest))
        return result
Example #20
 def find_libs_path_globs(self, names):
     libs_abs = self._underlying.find_libs(names)
     libs_unrooted = [self._unroot_lib_path(l) for l in libs_abs]
     path_globs = PathGlobsAndRoot(PathGlobs(tuple(libs_unrooted)),
                                   text_type(self._underlying.home))
     return (libs_unrooted, path_globs)
Example #21
    def _runtool_hermetic(self, main, tool_name, distribution, input_digest,
                          ctx):
        tool_classpath_abs = self._rsc_classpath
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        jvm_options = self._jvm_options

        if self._rsc.use_native_image:
            if jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(jvm_options)))
            native_image_path, native_image_snapshot = self._rsc.native_image(
                self.context)
            additional_snapshots = [native_image_snapshot]
            initial_args = [native_image_path]
        else:
            additional_snapshots = []
            initial_args = [
                distribution.java,
            ] + self.get_options().jvm_options + [
                '-cp',
                os.pathsep.join(tool_classpath),
                main,
            ]

        argfile_snapshot, = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

        cmd = initial_args + ['@{}'.format(argfile_snapshot.files[0])]

        pathglobs = list(tool_classpath)

        # Don't capture a snapshot if pathglobs is empty.
        path_globs_input_digest = None
        if pathglobs:
            root = PathGlobsAndRoot(PathGlobs(tuple(pathglobs)),
                                    get_buildroot())
            path_globs_input_digest = self.context._scheduler.capture_snapshots(
                (root, ))[0].directory_digest

        epr_input_files = self.context._scheduler.merge_directories(
            ((path_globs_input_digest, ) if path_globs_input_digest else ()) +
            ((input_digest, ) if input_digest else ()) +
            tuple(s.directory_digest for s in additional_snapshots) +
            (argfile_snapshot.directory_digest, ))

        epr = ExecuteProcessRequest(
            argv=tuple(cmd),
            input_files=epr_input_files,
            output_files=(fast_relpath(ctx.rsc_jar_file.path,
                                       get_buildroot()), ),
            output_directories=tuple(),
            timeout_seconds=15 * 60,
            description='run {} for {}'.format(tool_name, ctx.target),
            # TODO: These should always be unicodes
            # Since this is always hermetic, we need to use `underlying.home` because
            # ExecuteProcessRequest requires an existing, local jdk location.
            jdk_home=distribution.underlying_home,
        )
        res = self.context.execute_process_synchronously_without_raising(
            epr, self.name(), [WorkUnitLabel.COMPILER])

        if res.exit_code != 0:
            raise TaskError(res.stderr, exit_code=res.exit_code)

        # TODO: parse the output of -Xprint:timings for rsc and write it to self._record_target_stats()!

        res.output_directory_digest.dump(ctx.rsc_jar_file.path)

        ctx.rsc_jar_file = ClasspathEntry(ctx.rsc_jar_file.path,
                                          res.output_directory_digest)

        self.context._scheduler.materialize_directories((
            DirectoryToMaterialize(
                # NB the first element here is the root to materialize into, not the dir to snapshot
                get_buildroot(),
                res.output_directory_digest), ))

        return res
Example #22
  def _runtool(
    self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
    if self.execution_strategy == self.HERMETIC:
      with self.context.new_workunit(tool_name) as wu:
        tool_classpath_abs = self.tool_classpath(tool_name)
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        classpath_for_cmd = os.pathsep.join(tool_classpath)
        cmd = [
          distribution.java,
        ]
        cmd.extend(self.get_options().jvm_options)
        cmd.extend(['-cp', classpath_for_cmd])
        cmd.extend([main])
        cmd.extend(args)

        pathglobs = list(tool_classpath)
        pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f) for f in input_files)

        # Don't capture a snapshot if pathglobs is empty.
        path_globs_input_digest = None
        if pathglobs:
          root = PathGlobsAndRoot(
            PathGlobs(tuple(pathglobs)),
            text_type(get_buildroot()))
          path_globs_input_digest = self.context._scheduler.capture_snapshots((root,))[0].directory_digest

        if path_globs_input_digest and input_digest:
          epr_input_files = self.context._scheduler.merge_directories(
              (path_globs_input_digest, input_digest))
        else:
          epr_input_files = path_globs_input_digest or input_digest

        epr = ExecuteProcessRequest(
          argv=tuple(cmd),
          input_files=epr_input_files,
          output_files=tuple(),
          output_directories=(output_dir,),
          timeout_seconds=15*60,
          description='run {} for {}'.format(tool_name, tgt),
          # TODO: These should always be unicodes
          # Since this is always hermetic, we need to use `underlying_dist`
          jdk_home=text_type(self._zinc.underlying_dist.home),
        )
        res = self.context.execute_process_synchronously_without_raising(
          epr,
          self.name(),
          [WorkUnitLabel.TOOL])

        if res.exit_code != 0:
          raise TaskError(res.stderr)

        if output_dir:
          dump_digest(output_dir, res.output_directory_digest)
          self.context._scheduler.materialize_directories((
            DirectoryToMaterialize(
              # NB the first element here is the root to materialize into, not the dir to snapshot
              text_type(get_buildroot()),
              res.output_directory_digest),
          ))
          # TODO drop a file containing the digest, named maybe output_dir.digest
        return res
    else:
      with self.context.new_workunit(tool_name) as wu:
        result = self.runjava(classpath=self.tool_classpath(tool_name),
                              main=main,
                              jvm_options=self.get_options().jvm_options,
                              args=args,
                              workunit_name=tool_name,
                              workunit_labels=[WorkUnitLabel.TOOL],
                              dist=distribution
        )
        if result != 0:
          raise TaskError('Running {} failed'.format(tool_name))
        runjava_wu = None
        for c in wu.children:
          if c.name == tool_name:
            runjava_wu = c
            break
        if runjava_wu is None:
          raise Exception("couldn't find work unit for underlying execution")
        return runjava_wu
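Stripped of tool-specific detail, the hermetic branch above reduces to four steps: snapshot the tool classpath, merge all input digests into a single tree, execute the process against that tree, and collect its declared outputs. A condensed sketch under the same API assumptions (the argv and output file name are illustrative):

    # One merged input tree in; declared outputs come back as a digest.
    classpath_snapshot, = self.context._scheduler.capture_snapshots(
        (PathGlobsAndRoot(PathGlobs(tuple(tool_classpath)), get_buildroot()),))
    input_files = self.context._scheduler.merge_directories(
        (classpath_snapshot.directory_digest, input_digest))
    epr = ExecuteProcessRequest(
        argv=(distribution.java, '-cp', os.pathsep.join(tool_classpath), main),
        input_files=input_files,
        output_files=('out.jar',),
        description='run {}'.format(main),
    )
    res = self.context.execute_process_synchronously_without_raising(
        epr, self.name(), [WorkUnitLabel.TOOL])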
Example #23
    def _runtool_hermetic(self, main, tool_name, distribution, input_digest,
                          ctx):
        use_youtline = tool_name == "scalac-outliner"

        tool_classpath_abs = self._scalac_classpath if use_youtline else self._rsc_classpath
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        rsc_jvm_options = Rsc.global_instance().get_options().jvm_options

        if not use_youtline and self._rsc.use_native_image:
            if rsc_jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(rsc_jvm_options)))
            native_image_path, native_image_snapshot = self._rsc.native_image(
                self.context)
            additional_snapshots = [native_image_snapshot]
            initial_args = [native_image_path]
        else:
            additional_snapshots = []
            initial_args = (
                [distribution.java] + rsc_jvm_options +
                ["-cp", os.pathsep.join(tool_classpath), main])

        (argfile_snapshot, ) = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

        cmd = initial_args + [f"@{argfile_snapshot.files[0]}"]

        pathglobs = list(tool_classpath)

        # Don't capture a snapshot if pathglobs is empty.
        path_globs_input_digest = None
        if pathglobs:
            root = PathGlobsAndRoot(PathGlobs(tuple(pathglobs)),
                                    get_buildroot())
            path_globs_input_digest = self.context._scheduler.capture_snapshots(
                (root, ))[0].digest

        epr_input_files = self.context._scheduler.merge_directories(
            ((path_globs_input_digest, ) if path_globs_input_digest else ()) +
            ((input_digest, ) if input_digest else ()) +
            tuple(s.digest
                  for s in additional_snapshots) + (argfile_snapshot.digest, ))

        epr = Process(
            argv=tuple(cmd),
            input_digest=epr_input_files,
            output_files=(fast_relpath(ctx.rsc_jar_file.path,
                                       get_buildroot()), ),
            output_directories=tuple(),
            timeout_seconds=15 * 60,
            description=f"run {tool_name} for {ctx.target}",
            # TODO: These should always be unicodes
            # Since this is always hermetic, we need to use `underlying.home` because
            # Process requires an existing, local jdk location.
            jdk_home=distribution.underlying_home,
            is_nailgunnable=True,
        )
        res = self.context.execute_process_synchronously_without_raising(
            epr, self.name(), [WorkUnitLabel.COMPILER])

        if res.exit_code != 0:
            raise TaskError(res.stderr, exit_code=res.exit_code)

        # TODO: parse the output of -Xprint:timings for rsc and write it to self._record_target_stats()!

        res.output_digest.dump(ctx.rsc_jar_file.path)
        self.context._scheduler.materialize_directory(
            DirectoryToMaterialize(res.output_digest))
        ctx.rsc_jar_file.hydrate_missing_directory_digest(res.output_digest)

        return res
Example #24
def test_remove_prefix(rule_runner: RuleRunner) -> None:
    relevant_files = (
        "characters/dark_tower/roland",
        "characters/dark_tower/susannah",
    )
    all_files = (
        "books/dark_tower/gunslinger",
        "characters/altered_carbon/kovacs",
        *relevant_files,
        "index",
    )

    with temporary_dir() as temp_dir:
        safe_file_dump(os.path.join(temp_dir, "index"), "books\ncharacters\n")
        safe_file_dump(
            os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
            "Envoy",
            makedirs=True,
        )

        tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
        safe_file_dump(os.path.join(tower_dir, "roland"), "European Burmese", makedirs=True)
        safe_file_dump(os.path.join(tower_dir, "susannah"), "Not sure actually", makedirs=True)

        safe_file_dump(
            os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
            "1982",
            makedirs=True,
        )

        snapshot, snapshot_with_extra_files = rule_runner.scheduler.capture_snapshots(
            [
                PathGlobsAndRoot(PathGlobs(["characters/dark_tower/*"]), temp_dir),
                PathGlobsAndRoot(PathGlobs(["**"]), temp_dir),
            ]
        )

        # Check that we got the full snapshots that we expect
        assert snapshot.files == relevant_files
        assert snapshot_with_extra_files.files == all_files

        # Strip empty prefix:
        zero_prefix_stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "")]
        )
        assert snapshot.digest == zero_prefix_stripped_digest

        # Strip a non-empty prefix shared by all files:
        stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "characters/dark_tower")]
        )
        assert stripped_digest == Digest(
            fingerprint="71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c",
            serialized_bytes_length=162,
        )

        expected_snapshot = assert_single_element(
            rule_runner.scheduler.capture_snapshots([PathGlobsAndRoot(PathGlobs(["*"]), tower_dir)])
        )
        assert expected_snapshot.files == ("roland", "susannah")
        assert stripped_digest == expected_snapshot.digest

        # Try to strip a prefix which isn't shared by all files:
        with pytest.raises(Exception) as exc:
            rule_runner.request(
                Digest,
                [RemovePrefix(snapshot_with_extra_files.digest, "characters/dark_tower")],
            )
        assert (
            "Cannot strip prefix characters/dark_tower from root directory (Digest "
            "with hash Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>)"
            " - root directory contained non-matching directory named: books and file named: index"
        ) in str(exc.value)
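RemovePrefix is the digest-level analogue of os.path.relpath: every path in the tree is rewritten under the shorter root, and the request fails if any entry falls outside the prefix, as the last assertion shows. A small sketch assuming the same rule_runner fixture; the "src" layout is hypothetical:

    # Capture files under a known prefix, then strip it from the digest.
    snapshot, = rule_runner.scheduler.capture_snapshots(
        [PathGlobsAndRoot(PathGlobs(["src/**"]), temp_dir)])
    stripped = rule_runner.request(Digest, [RemovePrefix(snapshot.digest, "src")])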
Example #25
 def pathglob_for(filename):
   return PathGlobsAndRoot(
     PathGlobs(
       (fast_relpath_optional(filename, get_buildroot()),)),
     text_type(get_buildroot()))
Example #26
  def console_output(self, targets):
    if not self.get_options().transitive:
      targets = self.context.target_roots

    input_snapshots = tuple(
      target.sources_snapshot(scheduler=self.context._scheduler) for target in targets
    )
    input_files = {f for snapshot in input_snapshots for f in snapshot.files}

    # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
    with temporary_dir() as tmpdir:
      list_file = os.path.join(tmpdir, 'input_files_list')
      with open(list_file, 'w') as list_file_out:
        for input_file in sorted(input_files):
          list_file_out.write(input_file)
          list_file_out.write('\n')
      list_file_snapshot = self.context._scheduler.capture_snapshots((
        PathGlobsAndRoot(
          PathGlobs(('input_files_list',)),
          tmpdir,
        ),
      ))[0]

    cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(self.context)

    directory_digest = self.context._scheduler.merge_directories(tuple(
      s.directory_digest
      for s in input_snapshots + (cloc_snapshot, list_file_snapshot)
    ))

    cmd = (
      '/usr/bin/perl',
      cloc_path,
      '--skip-uniqueness',
      '--ignored=ignored',
      '--list-file=input_files_list',
      '--report-file=report',
    )

    # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
    req = ExecuteProcessRequest(
      argv=cmd,
      input_files=directory_digest,
      output_files=('ignored', 'report'),
      description='cloc',
    )
    exec_result = self.context.execute_process_synchronously_without_raising(req, 'cloc', (WorkUnitLabel.TOOL,))

    files_content_tuple = self.context._scheduler.product_request(
      FilesContent,
      [exec_result.output_directory_digest]
    )[0].dependencies

    files_content = {fc.path: fc.content.decode() for fc in files_content_tuple}
    for line in files_content['report'].split('\n'):
      yield line

    if self.get_options().ignored:
      yield 'Ignored the following files:'
      for line in files_content['ignored'].split('\n'):
        yield line
Example #27
    def _compile_hermetic(self, jvm_options, ctx, classes_dir, jar_file,
                          compiler_bridge_classpath_entry,
                          dependency_classpath, scalac_classpath_entries):
        zinc_relpath = fast_relpath(self._zinc.zinc.path, get_buildroot())

        snapshots = [
            ctx.target.sources_snapshot(self.context._scheduler),
        ]

        # scala_library() targets with java_sources have circular dependencies on those java source
        # files, and we provide them to the same zinc command line that compiles the scala, so we need
        # to make sure those source files are available in the hermetic execution sandbox.
        java_sources_targets = getattr(ctx.target, 'java_sources', [])
        java_sources_snapshots = [
            tgt.sources_snapshot(self.context._scheduler)
            for tgt in java_sources_targets
        ]
        snapshots.extend(java_sources_snapshots)

        # Ensure the dependencies and compiler bridge jars are available in the execution sandbox.
        relevant_classpath_entries = (
            dependency_classpath + [
                compiler_bridge_classpath_entry,
                self._nailgun_server_classpath_entry(
                ),  # We include nailgun-server, to use it to start servers when needed from the hermetic execution case.
            ])
        directory_digests = [
            entry.directory_digest for entry in relevant_classpath_entries
            if entry.directory_digest
        ]
        if len(directory_digests) != len(relevant_classpath_entries):
            for dep in relevant_classpath_entries:
                if not dep.directory_digest:
                    raise AssertionError(
                        "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                        "execution of zinc".format(dep))
        directory_digests.extend(
            classpath_entry.directory_digest
            for classpath_entry in scalac_classpath_entries)

        if self._zinc.use_native_image:
            if jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(jvm_options)))
            native_image_path, native_image_snapshot = self._zinc.native_image(
                self.context)
            native_image_snapshots = [
                native_image_snapshot.directory_digest,
            ]
            scala_boot_classpath = [
                classpath_entry.path
                for classpath_entry in scalac_classpath_entries
            ] + [
                # We include rt.jar on the scala boot classpath because the compiler usually gets its
                # contents from the VM it is executing in, but not in the case of a native image. This
                # resolves a `object java.lang.Object in compiler mirror not found.` error.
                '.jdk/jre/lib/rt.jar',
                # The same goes for the jce.jar, which provides javax.crypto.
                '.jdk/jre/lib/jce.jar',
            ]
            image_specific_argv = [
                native_image_path,
                '-java-home',
                '.jdk',
                '-Dscala.boot.class.path={}'.format(
                    os.pathsep.join(scala_boot_classpath)),
                '-Dscala.usejavacp=true',
            ]
        else:
            native_image_snapshots = []
            # TODO: Lean on distribution for the bin/java appending here
            image_specific_argv = ['.jdk/bin/java'] + jvm_options + [
                '-cp', zinc_relpath, Zinc.ZINC_COMPILE_MAIN
            ]

        argfile_snapshot, = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

        relpath_to_analysis = fast_relpath(ctx.analysis_file, get_buildroot())
        merged_local_only_scratch_inputs = self._compute_local_only_inputs(
            classes_dir, relpath_to_analysis, jar_file)

        # TODO: Extract something common from Executor._create_command to make the command line
        argv = image_specific_argv + ['@{}'.format(argfile_snapshot.files[0])]

        merged_input_digest = self.context._scheduler.merge_directories(
            [self._zinc.zinc.directory_digest] +
            [s.directory_digest for s in snapshots] + directory_digests +
            native_image_snapshots + [
                self.post_compile_extra_resources_digest(ctx),
                argfile_snapshot.directory_digest
            ])

        # NB: We always capture the output jar, but if classpath jars are not used, we additionally
        # capture loose classes from the workspace. This is because we need to both:
        #   1) allow loose classes as an input to dependent compiles
        #   2) allow jars to be materialized at the end of the run.
        output_directories = () if self.get_options().use_classpath_jars else (
            classes_dir, )

        req = ExecuteProcessRequest(
            argv=tuple(argv),
            input_files=merged_input_digest,
            output_files=(jar_file, relpath_to_analysis),
            output_directories=output_directories,
            description="zinc compile for {}".format(ctx.target.address.spec),
            unsafe_local_only_files_because_we_favor_speed_over_correctness_for_this_rule=merged_local_only_scratch_inputs,
            jdk_home=self._zinc.underlying_dist.home,
            is_nailgunnable=True,
        )
        res = self.context.execute_process_synchronously_or_raise(
            req, self.name(), [WorkUnitLabel.COMPILER])

        # TODO: Materialize as a batch in do_compile or somewhere
        self.context._scheduler.materialize_directory(
            DirectoryToMaterialize(res.output_directory_digest))

        # TODO: This should probably return a ClasspathEntry rather than a Digest
        return res.output_directory_digest
Example #28
    def _runtool_hermetic(self,
                          main,
                          tool_name,
                          args,
                          distribution,
                          tgt=None,
                          input_files=tuple(),
                          input_digest=None,
                          output_dir=None):
        tool_classpath_abs = self.tool_classpath(tool_name)
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
        # which py2 doesn't default to. This can be removed when we drop python 2.
        str_jvm_options = [
            text_type(opt) for opt in self.get_options().jvm_options
        ]
        cmd = [
            distribution.java,
        ] + str_jvm_options + [
            '-cp',
            os.pathsep.join(tool_classpath),
            main,
        ] + args

        pathglobs = list(tool_classpath)
        pathglobs.extend(f if os.path.isfile(f) else '{}/**'.format(f)
                         for f in input_files)

        # Don't capture a snapshot if pathglobs is empty.
        path_globs_input_digest = None
        if pathglobs:
            root = PathGlobsAndRoot(PathGlobs(tuple(pathglobs)),
                                    text_type(get_buildroot()))
            path_globs_input_digest = self.context._scheduler.capture_snapshots(
                (root, ))[0].directory_digest

        epr_input_files = self.context._scheduler.merge_directories((
            (path_globs_input_digest, ) if path_globs_input_digest else ()) + (
                (input_digest, ) if input_digest else ()))

        epr = ExecuteProcessRequest(
            argv=tuple(cmd),
            input_files=epr_input_files,
            output_files=tuple(),
            output_directories=(output_dir, ),
            timeout_seconds=15 * 60,
            description='run {} for {}'.format(tool_name, tgt),
            # TODO: These should always be unicodes
            # Since this is always hermetic, we need to use `underlying.home` because
            # ExecuteProcessRequest requires an existing, local jdk location.
            jdk_home=text_type(distribution.underlying_home),
        )
        res = self.context.execute_process_synchronously_without_raising(
            epr, self.name(), [WorkUnitLabel.TOOL])

        if res.exit_code != 0:
            raise TaskError(res.stderr, exit_code=res.exit_code)

        if output_dir:
            res.output_directory_digest.dump(output_dir)
            self.context._scheduler.materialize_directories((
                DirectoryToMaterialize(
                    # NB the first element here is the root to materialize into, not the dir to snapshot
                    text_type(get_buildroot()),
                    res.output_directory_digest), ))
            # TODO drop a file containing the digest, named maybe output_dir.digest
        return res
Example #29
    def console_output(self, targets):
        if not self.get_options().transitive:
            targets = self.context.target_roots

        # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
        with temporary_dir() as tmpdir:
            list_file = os.path.join(tmpdir, 'input_files_list')
            input_files = set()
            with open(list_file, 'w') as list_file_out:
                for target in targets:
                    for source in target.sources_relative_to_buildroot():
                        input_files.add(source)
                        list_file_out.write(source)
                        list_file_out.write('\n')
            list_file_snapshot = self.context._scheduler.capture_snapshots(
                (PathGlobsAndRoot(
                    PathGlobs(('input_files_list', )),
                    str(tmpdir),
                ), ))[0]

        cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(
            self.context)

        # TODO: This should use an input file snapshot which should be provided on the Target object,
        # rather than hackily re-snapshotting each of the input files.
        # See https://github.com/pantsbuild/pants/issues/5762
        input_pathglobs = PathGlobs(tuple(input_files))
        input_snapshot = self.context._scheduler.product_request(
            Snapshot, [input_pathglobs])[0]

        directory_digest = self.context._scheduler.merge_directories((
            cloc_snapshot.directory_digest,
            input_snapshot.directory_digest,
            list_file_snapshot.directory_digest,
        ))

        cmd = (
            '/usr/bin/perl',
            cloc_path,
            '--skip-uniqueness',
            '--ignored=ignored',
            '--list-file=input_files_list',
            '--report-file=report',
        )

        # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
        req = ExecuteProcessRequest(cmd, (), directory_digest,
                                    ('ignored', 'report'), (), 15 * 60, 'cloc')
        exec_result = self.context.execute_process_synchronously(
            req, 'cloc', (WorkUnitLabel.TOOL, ))

        # TODO: Remove this check when https://github.com/pantsbuild/pants/issues/5719 is resolved.
        if exec_result.exit_code != 0:
            raise TaskError('{} ... exited non-zero ({}).'.format(
                ' '.join(cmd), exec_result.exit_code))

        files_content_tuple = self.context._scheduler.product_request(
            FilesContent,
            [exec_result.output_directory_digest])[0].dependencies

        files_content = {fc.path: fc.content for fc in files_content_tuple}
        for line in files_content['report'].split('\n'):
            yield line

        if self.get_options().ignored:
            yield 'Ignored the following files:'
            for line in files_content['ignored'].split('\n'):
                yield line
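Reading process output back out, as this example does, goes through a FilesContent product request: the scheduler resolves a digest into concrete (path, bytes) pairs. A hedged sketch of just that step, given some output_directory_digest from a prior execution:

    # Resolve a digest into its file contents.
    files_content_tuple = scheduler.product_request(
        FilesContent, [output_directory_digest])[0].dependencies
    report = {fc.path: fc.content for fc in files_content_tuple}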
Example #30
  def test_strip_prefix(self):
    # Set up files:

    relevant_files = (
      'characters/dark_tower/roland',
      'characters/dark_tower/susannah',
    )
    all_files = (
      'books/dark_tower/gunslinger',
      'characters/altered_carbon/kovacs',
    ) + relevant_files + (
      'index',
    )

    with temporary_dir() as temp_dir:
      safe_file_dump(os.path.join(temp_dir, 'index'), 'books\ncharacters\n')
      safe_file_dump(
        os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
        "Envoy",
        makedirs=True,
      )

      tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
      safe_file_dump(os.path.join(tower_dir, "roland"), "European Burmese", makedirs=True)
      safe_file_dump(os.path.join(tower_dir, "susannah"), "Not sure actually", makedirs=True)

      safe_file_dump(
        os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
        "1982",
        makedirs=True,
      )

      snapshot, snapshot_with_extra_files = self.scheduler.capture_snapshots((
        PathGlobsAndRoot(PathGlobs(("characters/dark_tower/*",)), temp_dir),
        PathGlobsAndRoot(PathGlobs(("**",)), temp_dir),
      ))
      # Check that we got the full snapshots that we expect
      self.assertEqual(snapshot.files, relevant_files)
      self.assertEqual(snapshot_with_extra_files.files, all_files)

      # Strip empty prefix:
      zero_prefix_stripped_digest = assert_single_element(self.scheduler.product_request(
        Digest,
        [DirectoryWithPrefixToStrip(snapshot.directory_digest, "")],
      ))
      self.assertEqual(snapshot.directory_digest, zero_prefix_stripped_digest)

      # Strip a non-empty prefix shared by all files:
      stripped_digest = assert_single_element(self.scheduler.product_request(
        Digest,
        [DirectoryWithPrefixToStrip(snapshot.directory_digest, "characters/dark_tower")],
      ))
      self.assertEqual(
        stripped_digest,
        Digest(
          fingerprint='71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c',
          serialized_bytes_length=162,
        )
      )
      expected_snapshot = assert_single_element(self.scheduler.capture_snapshots((
        PathGlobsAndRoot(PathGlobs(("*",)), tower_dir),
      )))
      self.assertEqual(expected_snapshot.files, ('roland', 'susannah'))
      self.assertEqual(stripped_digest, expected_snapshot.directory_digest)

      # Try to strip a prefix which isn't shared by all files:
      with self.assertRaisesWithMessageContaining(Exception, "Cannot strip prefix characters/dark_tower from root directory Digest(Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>, 243) - root directory contained non-matching directory named: books and file named: index"):
        self.scheduler.product_request(
          Digest,
          [DirectoryWithPrefixToStrip(snapshot_with_extra_files.directory_digest, "characters/dark_tower")]
        )