def test_iter_relative_paths(self):
    """iter_relative_paths() yields each file prefixed by the fileset's rel_root, in order."""
    efws = EagerFilesetWithSpec('test_root', {'globs': []}, files=['a', 'b', 'c'], file_hashes={})
    result = list(efws.iter_relative_paths())
    # assertEquals is a deprecated alias of assertEqual; the alias is removed in Python 3.12.
    self.assertEqual(result, ['test_root/a', 'test_root/b', 'test_root/c'])
def test_iter_relative_paths(self):
    """paths_from_buildroot_iter() yields each file prefixed by the fileset's rel_root, in order."""
    efws = EagerFilesetWithSpec('test_root', {'globs': []}, files=['a', 'b', 'c'], files_hash='deadbeef')
    result = list(efws.paths_from_buildroot_iter())
    # assertEquals is a deprecated alias of assertEqual; the alias is removed in Python 3.12.
    self.assertEqual(result, ['test_root/a', 'test_root/b', 'test_root/c'])
def _capture_sources(self, targets_and_dirs):
    """Snapshot the generated sources for each (target, synthetic dir) pair.

    Returns one EagerFilesetWithSpec per input pair, in order.
    """
    glob_requests = []
    rel_dirs = []
    specs = []
    for target, synthetic_target_dir in targets_and_dirs:
        # Either the task declares its globs statically, or we discover them per target.
        if self.sources_globs is None:
            matched = list(self.find_sources(target, synthetic_target_dir))
        else:
            matched = self.sources_globs
        rel_dir = os.path.relpath(synthetic_target_dir, get_buildroot())
        include_globs = tuple(os.path.join(rel_dir, f) for f in matched)
        exclude_globs = tuple(
            os.path.join(rel_dir, f) for f in self.sources_exclude_globs
        )
        glob_requests.append(
            PathGlobsAndRoot(
                PathGlobs(include_globs, exclude_globs),
                text_type(get_buildroot()),
            )
        )
        rel_dirs.append(rel_dir)
        specs.append(FilesetRelPathWrapper.to_filespec(include_globs))
    # A single scheduler call captures every snapshot at once.
    snapshots = self.context._scheduler.capture_snapshots(tuple(glob_requests))
    return tuple(
        EagerFilesetWithSpec(rel_dir, spec, snapshot)
        for rel_dir, spec, snapshot in zip(rel_dirs, specs, snapshots)
    )
def run_black(
    self,
    source_files: List[FileContent],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
) -> Tuple[LintResult, FmtResult]:
    """Run Black's lint and fmt rules over in-memory source files.

    :param source_files: File contents materialized into a snapshot for the target.
    :param config: Optional pyproject.toml contents, written to disk and passed via --black-config.
    :param passthrough_args: Extra args forwarded to Black via --black-args.
    :param skip: If true, pass --black-skip.
    :return: The (LintResult, FmtResult) pair.
    """
    args = ["--backend-packages2=pants.backend.python.lint.black"]
    if config is not None:
        self.create_file(relpath="pyproject.toml", contents=config)
        args.append("--black-config=pyproject.toml")
    if passthrough_args:
        args.append(f"--black-args='{passthrough_args}'")
    if skip:
        # Fixed: this was an f-string with no placeholders (flake8 F541); a plain literal suffices.
        args.append("--black-skip")
    input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    target_adaptor = TargetAdaptor(
        sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=input_snapshot),
        address=Address.parse("test:target"),
    )
    lint_target = BlackTarget(target_adaptor)
    # The fmt target is seeded with the input digest as the "prior formatter" result.
    fmt_target = BlackTarget(
        target_adaptor, prior_formatter_result_digest=input_snapshot.directory_digest)
    options_bootstrapper = create_options_bootstrapper(args=args)
    lint_result = self.request_single_product(
        LintResult, Params(lint_target, options_bootstrapper))
    fmt_result = self.request_single_product(
        FmtResult, Params(fmt_target, options_bootstrapper))
    return lint_result, fmt_result
def run_bandit(
    self,
    source_files: List[FileContent],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    interpreter_constraints: Optional[str] = None,
    skip: bool = False,
) -> LintResult:
    """Run the Bandit lint rule over in-memory source files.

    :param source_files: File contents materialized into a snapshot for the target.
    :param config: Optional .bandit config contents, written to disk and passed via --bandit-config.
    :param passthrough_args: Extra args forwarded via --bandit-args.
    :param interpreter_constraints: Optional single compatibility constraint for the target.
    :param skip: If true, pass --bandit-skip.
    """
    args = ["--backend-packages2=pants.backend.python.lint.bandit"]
    if config:
        # TODO: figure out how to get this file to exist...
        self.create_file(relpath=".bandit", contents=config)
        args.append("--bandit-config=.bandit")
    if passthrough_args:
        args.append(f"--bandit-args={passthrough_args}")
    if skip:
        # Fixed: this was an f-string with no placeholders (flake8 F541); a plain literal suffices.
        args.append("--bandit-skip")
    input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    target = BanditTarget(
        PythonTargetAdaptor(
            sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=input_snapshot),
            address=Address.parse("test:target"),
            compatibility=[interpreter_constraints] if interpreter_constraints else None,
        )
    )
    return self.request_single_product(
        LintResult,
        Params(target, create_options_bootstrapper(args=args)),
    )
def run_flake8(
    self,
    source_files: List[FileContent],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[Sequence[str]] = None,
    interpreter_constraints: Optional[Sequence[str]] = None,
    skip: bool = False,
) -> LintResult:
    """Run the Flake8 lint rule over in-memory source files and return its result."""
    if config is not None:
        self.create_file(relpath=".flake8", contents=config)
    snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    adaptor = PythonTargetAdaptor(
        sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=snapshot),
        address=Address.parse("test:target"),
        compatibility=interpreter_constraints,
    )
    target = Flake8Target(adaptor)
    subsystem_options = {
        Flake8.options_scope: {
            "config": ".flake8" if config else None,
            "args": passthrough_args or [],
            "skip": skip,
        }
    }
    flake8_subsystem = global_subsystem_instance(Flake8, options=subsystem_options)
    return self.request_single_product(
        LintResult,
        Params(
            target,
            flake8_subsystem,
            PythonNativeCode.global_instance(),
            PythonSetup.global_instance(),
            SubprocessEnvironment.global_instance(),
        ),
    )
def test_passes_eager_fileset_with_spec_through(self):
    """SourcesField should hand back the exact EagerFilesetWithSpec it was given."""
    self.create_file('foo/a.txt', 'a_contents')
    efws = EagerFilesetWithSpec('foo', 'a.txt', {'a.txt': b'12345'})
    field = SourcesField(sources=efws)
    self.assertIs(efws, field.sources)
    self.assertEqual(['a.txt'], list(field.source_paths))
def _eager_fileset_with_spec(spec_path, filespecs, source_files_content, excluded_source_files):
    """Build an EagerFilesetWithSpec, sha1-hashing each non-excluded file's content.

    Keys of the hash dict are paths relative to `spec_path`.
    """
    excluded_paths = {f.path for f in excluded_source_files.dependencies}
    included = [
        fc for fc in source_files_content.dependencies if fc.path not in excluded_paths
    ]
    hashes_by_relpath = {
        fast_relpath(fc.path, spec_path): sha1(fc.content).digest() for fc in included
    }
    return EagerFilesetWithSpec(spec_path, filespecs, hashes_by_relpath)
def _eager_fileset_with_spec(spec_path, filespecs, source_files_digest, excluded_source_files):
    """Build an EagerFilesetWithSpec from per-file digests, skipping excluded paths.

    Keys of the hash dict are paths relative to `spec_path`.
    """
    excluded_paths = {f.path for f in excluded_source_files.dependencies}
    included = [
        fd for fd in source_files_digest.dependencies if fd.path not in excluded_paths
    ]
    hashes_by_relpath = {fast_relpath(fd.path, spec_path): fd.digest for fd in included}
    return EagerFilesetWithSpec(spec_path, filespecs, hashes_by_relpath)
def test_passes_eager_fileset_with_spec_through(self):
    """SourcesField should expose the given fileset unchanged, with correct path views."""
    self.create_file('foo/a.txt', 'a_contents')
    efws = EagerFilesetWithSpec(
        'foo', {'globs': ['foo/a.txt']}, ['foo/a.txt'], {'foo/a.txt': b'12345'})
    field = SourcesField(sources=efws)
    self.assertIs(efws, field.sources)
    self.assertEqual(['foo/a.txt'], list(field.source_paths))
    self.assertEqual(['foo/foo/a.txt'], list(field.relative_to_buildroot()))
def test_eager_fileset_with_spec_fails_if_exclude_filespec_not_prefixed_by_relroot(
    self,
) -> None:
    """An exclude glob that falls outside the rel_root must raise ValueError."""
    bad_filespec = {"globs": [], "exclude": [{"globs": ["notfoo/a.txt"]}]}
    with self.assertRaises(ValueError):
        EagerFilesetWithSpec("foo", bad_filespec, EMPTY_SNAPSHOT)
def test_eager_fileset_with_spec_fails_if_exclude_filespec_not_prefixed_by_relroot(self):
    """An exclude glob that falls outside the rel_root must raise ValueError."""
    bad_filespec = {'globs': [], 'exclude': [{'globs': ['notfoo/a.txt']}]}
    with self.assertRaises(ValueError):
        EagerFilesetWithSpec('foo', bad_filespec, EMPTY_SNAPSHOT)
def make_target_with_origin(
    self,
    source_files: List[FileContent],
    *,
    origin: Optional[OriginSpec] = None,
) -> TargetAdaptorWithOrigin:
    """Build a TargetAdaptorWithOrigin whose sources come from in-memory file contents."""
    snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    # Default the origin to the same single address the adaptor is parsed from.
    if origin is None:
        origin = SingleAddress(directory="test", name="target")
    adaptor = TargetAdaptor(
        sources=EagerFilesetWithSpec("test", {"globs": []}, snapshot=snapshot),
        address=Address.parse("test:target"),
    )
    return TargetAdaptorWithOrigin(adaptor, origin)
def test_eager_fileset_with_spec_fails_if_exclude_filespec_not_prefixed_by_relroot(self):
    """An exclude glob that falls outside the rel_root must raise ValueError."""
    bad_filespec = {'globs': [], 'exclude': [{'globs': ['notfoo/a.txt']}]}
    with self.assertRaises(ValueError):
        EagerFilesetWithSpec('foo', bad_filespec, files=['files'], files_hash='deadbeef')
def _eager_fileset_with_spec(spec_path, filespec, snapshot, include_dirs=False):
    """Re-root the filespec's globs under `spec_path` and wrap the snapshot as a fileset."""
    adjusted = FilesetRelPathWrapper.to_filespec(filespec['globs'], spec_path)
    if 'exclude' in filespec:
        # Exclude entries are re-rooted the same way as the includes.
        adjusted['exclude'] = [
            FilesetRelPathWrapper.to_filespec(entry['globs'], spec_path)
            for entry in filespec['exclude']
        ]
    return EagerFilesetWithSpec(spec_path, adjusted, snapshot, include_dirs=include_dirs)
def _eager_fileset_with_spec(spec_path, filespecs, source_files_digest, excluded_source_files):
    """Build an EagerFilesetWithSpec from per-file digests, preserving match order."""
    excluded_paths = {f.path for f in excluded_source_files.dependencies}
    # NB: In order to preserve declared ordering, we record a list of matched files
    # independent of the file hash dict.
    matched = []
    hashes = {}
    for fd in source_files_digest.dependencies:
        if fd.path in excluded_paths:
            continue
        relpath = fast_relpath(fd.path, spec_path)
        matched.append(relpath)
        hashes[relpath] = fd.digest
    return EagerFilesetWithSpec(spec_path, filespecs,
                                files=tuple(matched),
                                file_hashes=hashes)
def _eager_fileset_with_spec(spec_path, filespec, snapshot, include_dirs=False):
    """Re-root the filespec under `spec_path` and build an EagerFilesetWithSpec.

    :param include_dirs: If true, include the snapshot's directory entries as well as files.
    """
    fds = snapshot.path_stats if include_dirs else snapshot.files
    files = tuple(fast_relpath(fd.path, spec_path) for fd in fds)
    relpath_adjusted_filespec = FilesetRelPathWrapper.to_filespec(filespec['globs'], spec_path)
    # Fixed: dict.has_key() is Python 2 only (removed in Python 3); `in` works on both.
    if 'exclude' in filespec:
        relpath_adjusted_filespec['exclude'] = [
            FilesetRelPathWrapper.to_filespec(e['globs'], spec_path)
            for e in filespec['exclude']
        ]
    return EagerFilesetWithSpec(spec_path, relpath_adjusted_filespec,
                                files=files,
                                files_hash=snapshot.fingerprint)
def _eager_fileset_with_spec(spec_path, filespec, snapshot):
    """Re-root the filespec under `spec_path` and build an EagerFilesetWithSpec."""
    files = tuple(fast_relpath(fd.path, spec_path) for fd in snapshot.files)
    relpath_adjusted_filespec = FilesetRelPathWrapper.to_filespec(filespec['globs'], spec_path)
    # Fixed: dict.has_key() is Python 2 only (removed in Python 3); `in` works on both.
    if 'exclude' in filespec:
        relpath_adjusted_filespec['exclude'] = [
            FilesetRelPathWrapper.to_filespec(e['globs'], spec_path)
            for e in filespec['exclude']
        ]
    # NB: In order to preserve declared ordering, we record a list of matched files
    # independent of the file hash dict.
    return EagerFilesetWithSpec(spec_path, relpath_adjusted_filespec,
                                files=files,
                                files_hash=snapshot.fingerprint)
def run_coordinator_of_tests(
    self,
    *,
    address: Address,
    origin: Optional[OriginSpec] = None,
    test_target_type: bool = True,
    include_sources: bool = True,
) -> AddressAndTestResult:
    """Drive the `coordinator_of_tests` rule against a mocked-up hydrated target.

    :param address: Address given to the adaptor handed to the rule.
    :param origin: Origin for the target; defaults to a SingleAddress derived from `address`.
    :param test_target_type: If true, model a python_tests target, else a python_binary.
    :param include_sources: If false, the mocked fileset contains no files.
    :return: The result produced by the rule under captured INFO logging.
    """
    mocked_fileset = EagerFilesetWithSpec(
        "src",
        {"globs": []},
        snapshot=Snapshot(
            # TODO: this is not robust to set as an empty digest. Add a test util that provides
            # some premade snapshots and possibly a generalized make_hydrated_target function.
            directory_digest=EMPTY_DIRECTORY_DIGEST,
            files=tuple(["test.py"] if include_sources else []),
            dirs=(),
        ),
    )
    adaptor_cls = PythonTestsAdaptor if test_target_type else PythonBinaryAdaptor
    type_alias = "python_tests" if test_target_type else "python_binary"
    adaptor = adaptor_cls(address=address, type_alias=type_alias, sources=mocked_fileset)
    union_membership = UnionMembership(union_rules=OrderedDict(
        {TestTarget: OrderedSet([PythonTestsAdaptorWithOrigin])}))
    with self.captured_logging(logging.INFO):
        result: AddressAndTestResult = run_rule(
            coordinator_of_tests,
            rule_args=[
                HydratedTargetWithOrigin(
                    target=HydratedTarget(adaptor),
                    origin=(origin
                            or SingleAddress(directory=address.spec_path, name=address.target_name)),
                ),
                union_membership,
            ],
            mock_gets=[
                # Short-circuit the rule's Get(TestResult, TestTarget) with a canned success.
                MockGet(
                    product_type=TestResult,
                    subject_type=TestTarget,
                    mock=lambda _: TestResult(
                        status=Status.SUCCESS, stdout="foo", stderr=""),
                ),
            ],
            union_membership=union_membership,
        )
    return result
def test_passes_eager_fileset_with_spec_through(self):
    """SourcesField should hand back the exact fileset it was constructed with."""
    self.create_file('foo/a.txt', 'a_contents')
    efws = EagerFilesetWithSpec(
        rel_root='foo',
        # Glob spec is relative to build root
        filespec={'globs': ['foo/foo/a.txt']},
        # files are relative to `rel_root`
        files=['foo/a.txt'],
        files_hash={'foo/a.txt': b'12345'},
    )
    field = SourcesField(sources=efws)
    self.assertIs(efws, field.sources)
    self.assertEqual(['foo/a.txt'], list(field.source_paths))
    self.assertEqual(['foo/foo/a.txt'], list(field.relative_to_buildroot()))
def _create_sources_with_fingerprint(self, target_workdir, fingerprint, files):
    """Create an EagerFilesetWithSpec to pass to the sources argument for synthetic target injection.

    We are creating and passing an EagerFilesetWithSpec to the synthetic target injection
    in the hopes that it will save the time of having to refingerprint the sources.

    :param target_workdir: The directory containing the generated code for the target.
    :param fingerprint: the fingerprint of the VersionedTarget with which the
      EagerFilesetWithSpec will be created.
    :param files: a list of exact paths to generated sources.
    """
    rel_dir = os.path.relpath(target_workdir, get_buildroot())
    buildroot_relative_paths = [os.path.join(rel_dir, f) for f in files]
    spec = FilesetRelPathWrapper.to_filespec(buildroot_relative_paths)
    # Reuse the existing fingerprint instead of hashing the files again.
    combined_hash = '{}.{}'.format(fingerprint.id, fingerprint.hash)
    return EagerFilesetWithSpec(rel_dir,
                                filespec=spec,
                                files=files,
                                files_hash=combined_hash)
def process_remote_sources(self):
    """Create synthetic targets with populated sources from remote_sources targets."""
    unpacked_sources = self.context.products.get_data(UnpackedArchives)
    remote_sources_targets = self.context.targets(
        predicate=lambda t: isinstance(t, RemoteSources))
    if not remote_sources_targets:
        return
    # First pass: collect one glob request, filespec, and unpack dir per target.
    snapshot_specs = []
    filespecs = []
    unpack_dirs = []
    for target in remote_sources_targets:
        unpacked_archive = unpacked_sources[target.sources_target]
        sources = unpacked_archive.found_files
        rel_unpack_dir = unpacked_archive.rel_unpack_dir
        self.context.log.debug(
            'target: {}, rel_unpack_dir: {}, sources: {}'.format(
                target, rel_unpack_dir, sources))
        sources_in_dir = tuple(
            os.path.join(rel_unpack_dir, source) for source in sources)
        snapshot_specs.append(
            PathGlobsAndRoot(
                PathGlobs(sources_in_dir),
                get_buildroot(),
            ))
        filespecs.append({'globs': sources_in_dir})
        unpack_dirs.append(rel_unpack_dir)
    # Capture every snapshot in one scheduler call.
    snapshots = self.context._scheduler.capture_snapshots(
        tuple(snapshot_specs))
    # Second pass: inject one synthetic target per remote_sources target, then re-point
    # existing dependents of the original target at the synthetic one.
    for target, snapshot, filespec, rel_unpack_dir in \
        zip(remote_sources_targets, snapshots, filespecs, unpack_dirs):
        synthetic_target = self.context.add_new_target(
            address=Address(os.path.relpath(self.workdir, get_buildroot()), target.id),
            target_type=target.destination_target_type,
            dependencies=target.dependencies,
            sources=EagerFilesetWithSpec(rel_unpack_dir, filespec, snapshot),
            derived_from=target,
            **target.destination_target_args)
        self.context.log.debug(
            'synthetic_target: {}'.format(synthetic_target))
        for dependent in self.context.build_graph.dependents_of(
                target.address):
            self.context.build_graph.inject_dependency(
                dependent, synthetic_target.address)
def _capture_sources(self, vts):
    """Capture snapshots of the generated sources for each VersionedTarget.

    Returns one EagerFilesetWithSpec per input vt, in order. As a side effect, each
    captured digest is dumped adjacent to the vt's current results dir so later runs
    can reuse it via the `Digest.load` below.
    """
    to_capture = []
    results_dirs = []
    filespecs = []
    for vt in vts:
        target = vt.target
        # Compute the (optional) subdirectory of the results_dir to generate code to. This
        # path will end up in the generated FilesetWithSpec and target, and thus needs to be
        # located below the stable/symlinked `vt.results_dir`.
        synthetic_target_dir = self.synthetic_target_dir(
            target, vt.results_dir)
        files = self.sources_globs
        results_dir_relpath = fast_relpath(synthetic_target_dir, get_buildroot())
        buildroot_relative_globs = tuple(
            os.path.join(results_dir_relpath, file) for file in files)
        buildroot_relative_excludes = tuple(
            os.path.join(results_dir_relpath, file)
            for file in self.sources_exclude_globs)
        to_capture.append(
            PathGlobsAndRoot(
                PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
                text_type(get_buildroot()),
                # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`.
                Digest.load(vt.current_results_dir),
            ))
        results_dirs.append(results_dir_relpath)
        filespecs.append(
            FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))
    # One scheduler call captures every snapshot at once.
    snapshots = self.context._scheduler.capture_snapshots(
        tuple(to_capture))
    # Persist each captured digest for reuse on subsequent runs.
    for snapshot, vt in zip(snapshots, vts):
        snapshot.directory_digest.dump(vt.current_results_dir)
    return tuple(
        EagerFilesetWithSpec(
            results_dir_relpath,
            filespec,
            snapshot,
        ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots))
def make_target_with_origin(
    self,
    source_files: List[FileContent],
    *,
    interpreter_constraints: Optional[str] = None,
    origin: Optional[OriginSpec] = None,
) -> PythonTargetAdaptorWithOrigin:
    """Build a PythonTargetAdaptorWithOrigin whose sources come from in-memory files."""
    snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    kwargs = {
        "sources": EagerFilesetWithSpec("test", {"globs": []}, snapshot=snapshot),
        "address": Address.parse("test:target"),
    }
    # Only set compatibility when a constraint was actually requested.
    if interpreter_constraints:
        kwargs["compatibility"] = interpreter_constraints
    if origin is None:
        origin = SingleAddress(directory="test", name="target")
    return PythonTargetAdaptorWithOrigin(PythonTargetAdaptor(**kwargs), origin)
def _eager_fileset_with_spec(spec_path, filespec, source_files_digest, excluded_source_files):
    """Build an EagerFilesetWithSpec from per-file digests, re-rooting the filespec."""
    excluded = {f.path for f in excluded_source_files.dependencies}
    file_tuples = [(fast_relpath(fd.path, spec_path), fd.digest)
                   for fd in source_files_digest.dependencies
                   if fd.path not in excluded]
    relpath_adjusted_filespec = FilesetRelPathWrapper.to_filespec(filespec['globs'], spec_path)
    # Fixed: dict.has_key() is Python 2 only (removed in Python 3); `in` works on both.
    if 'exclude' in filespec:
        relpath_adjusted_filespec['exclude'] = [
            FilesetRelPathWrapper.to_filespec(e['globs'], spec_path)
            for e in filespec['exclude']
        ]
    # NB: In order to preserve declared ordering, we record a list of matched files
    # independent of the file hash dict.
    return EagerFilesetWithSpec(spec_path, relpath_adjusted_filespec,
                                files=tuple(f for f, _ in file_tuples),
                                file_hashes=dict(file_tuples))
def run_coordinator_of_tests(
    self,
    *,
    address: Address,
    bfaddr_to_address_spec: Optional[Dict[BuildFileAddress, AddressSpec]] = None,
    test_target_type: bool = True,
    include_sources: bool = True,
) -> AddressAndTestResult:
    """Drive the `coordinator_of_tests` rule against a mocked-up hydrated target.

    :param address: Address for the HydratedTarget handed to the rule.
    :param bfaddr_to_address_spec: Optional provenance mapping; defaults to empty.
    :param test_target_type: If true, model a python_tests target, else a python_binary.
    :param include_sources: If false, the mocked fileset contains no files.
    :return: The result produced by the rule under captured INFO logging.
    """
    mocked_fileset = EagerFilesetWithSpec(
        "src",
        {"globs": []},
        snapshot=Snapshot(
            # TODO: this is not robust to set as an empty digest. Add a test util that provides
            # some premade snapshots and possibly a generalized make_hydrated_target function.
            directory_digest=EMPTY_DIRECTORY_DIGEST,
            files=tuple(["test.py"] if include_sources else []),
            dirs=()))
    target_adaptor = (PythonTestsAdaptor(type_alias='python_tests',
                                         sources=mocked_fileset)
                      if test_target_type else PythonBinaryAdaptor(
                          type_alias='python_binary', sources=mocked_fileset))
    with self.captured_logging(logging.INFO):
        result: AddressAndTestResult = run_rule(
            coordinator_of_tests,
            rule_args=[
                HydratedTarget(address, target_adaptor, ()),
                UnionMembership(
                    union_rules={TestTarget: [PythonTestsAdaptor]}),
                AddressProvenanceMap(
                    bfaddr_to_address_spec=bfaddr_to_address_spec or {}),
            ],
            mock_gets=[
                # Short-circuit the rule's Get(TestResult, ...) with a canned success.
                MockGet(
                    product_type=TestResult,
                    subject_type=PythonTestsAdaptor,
                    mock=lambda _: TestResult(
                        status=Status.SUCCESS, stdout='foo', stderr=''),
                ),
            ],
        )
    return result
def _eager_fileset_with_spec(
    spec_path: str,
    filespec: Filespec,
    snapshot: Snapshot,
    include_dirs: bool = False,
) -> EagerFilesetWithSpec:
    """Re-root the filespec's globs under `spec_path` and wrap the snapshot as a fileset."""
    adjusted_filespec = FilesetRelPathWrapper.to_filespec(filespec["globs"], spec_path)
    if "exclude" in filespec:
        # Exclude entries are re-rooted the same way as the includes.
        adjusted_filespec["exclude"] = [
            FilesetRelPathWrapper.to_filespec(entry["globs"], spec_path)
            for entry in filespec["exclude"]
        ]
    return EagerFilesetWithSpec(
        spec_path, adjusted_filespec, snapshot, include_dirs=include_dirs)
def make_hydrated_target(
    *,
    name: str = "target",
    adaptor_type: Type[TargetAdaptor] = PythonTargetAdaptor,
    include_sources: bool = True,
) -> HydratedTarget:
    """Build a HydratedTarget around a mocked snapshot for rule tests."""
    # TODO: this is not robust to set as an empty digest. Add a test util that provides
    # some premade snapshots and possibly a generalized make_hydrated_target function.
    file_names = ("formatted.txt", "fake.txt") if include_sources else ()
    mocked_snapshot = Snapshot(
        directory_digest=EMPTY_DIRECTORY_DIGEST, files=file_names, dirs=())
    sources = EagerFilesetWithSpec("src", {"globs": []}, snapshot=mocked_snapshot)
    adaptor = adaptor_type(sources=sources, name=name)
    return HydratedTarget(address=f"src/{name}", adaptor=adaptor, dependencies=())
def run_black(
    self,
    source_files: List[FileContent],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[Sequence[str]] = None,
) -> Tuple[LintResult, FmtResult]:
    """Run both the Black lint and fmt rules over in-memory source files."""
    if config is not None:
        self.create_file(relpath="pyproject.toml", contents=config)
    snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    adaptor = TargetAdaptor(
        sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=snapshot),
        address=Address.parse("test:target"),
    )
    target = FormattablePythonTarget(adaptor)
    subsystem_options = {
        Black.options_scope: {
            "config": "pyproject.toml" if config else None,
            "args": passthrough_args or [],
        }
    }
    black_subsystem = global_subsystem_instance(Black, options=subsystem_options)
    black_setup = self.request_single_product(
        BlackSetup,
        Params(
            black_subsystem,
            PythonNativeCode.global_instance(),
            PythonSetup.global_instance(),
            SubprocessEnvironment.global_instance(),
        ),
    )
    # Lint and fmt share the same parameter set.
    shared_params = Params(target, black_setup, PythonSetup.global_instance(),
                           SubprocessEnvironment.global_instance())
    lint_result = self.request_single_product(LintResult, shared_params)
    fmt_result = self.request_single_product(FmtResult, shared_params)
    return lint_result, fmt_result
def run_isort(
    self,
    source_files: List[FileContent],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[Sequence[str]] = None,
    skip: bool = False,
) -> Tuple[LintResult, FmtResult]:
    """Run both the isort lint and fmt rules over in-memory source files."""
    if config is not None:
        self.create_file(relpath=".isort.cfg", contents=config)
    snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
    adaptor = TargetAdaptor(
        sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=snapshot),
        address=Address.parse("test:target"),
    )
    lint_target = IsortTarget(adaptor)
    # The fmt target is seeded with the input digest as the "prior formatter" result.
    fmt_target = IsortTarget(
        adaptor, prior_formatter_result_digest=snapshot.directory_digest)
    subsystem_options = {
        Isort.options_scope: {
            "config": [".isort.cfg"] if config else None,
            "args": passthrough_args or [],
            "skip": skip,
        }
    }
    isort_subsystem = global_subsystem_instance(Isort, options=subsystem_options)
    python_subsystems = [
        PythonNativeCode.global_instance(),
        PythonSetup.global_instance(),
        SubprocessEnvironment.global_instance(),
    ]
    isort_setup = self.request_single_product(
        IsortSetup, Params(isort_subsystem, *python_subsystems))
    lint_result = self.request_single_product(
        LintResult, Params(lint_target, isort_setup, *python_subsystems))
    fmt_result = self.request_single_product(
        FmtResult, Params(fmt_target, isort_setup, *python_subsystems))
    return lint_result, fmt_result
def test_iter_relative_paths(self):
    """iter_relative_paths() yields each file prefixed by the fileset's rel_root, in order."""
    efws = EagerFilesetWithSpec('test_root', {'globs': []}, files=['a', 'b', 'c'], file_hashes={})
    result = list(efws.iter_relative_paths())
    # assertEquals is a deprecated alias of assertEqual; the alias is removed in Python 3.12.
    self.assertEqual(result, ['test_root/a', 'test_root/b', 'test_root/c'])
def test_iter_relative_paths(self):
    """paths_from_buildroot_iter() yields each file prefixed by the fileset's rel_root, in order."""
    efws = EagerFilesetWithSpec('test_root', {'globs': []}, files=['a', 'b', 'c'], files_hash='deadbeef')
    result = list(efws.paths_from_buildroot_iter())
    # assertEquals is a deprecated alias of assertEqual; the alias is removed in Python 3.12.
    self.assertEqual(result, ['test_root/a', 'test_root/b', 'test_root/c'])