async def find_owners(
    build_configuration: BuildConfiguration,
    address_mapper: AddressMapper,
    owners_request: OwnersRequest,
) -> BuildFileAddresses:
    sources_set = OrderedSet(owners_request.sources)
    dirs_set = OrderedSet(dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get(HydratedTargets, Specs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target):
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (ie, `PathGlobs`) for a
        # target, so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get('sources', None)
        if target_sources and any_matches_filespec(sources_set, target_sources.filespec):
            return True
        return False

    direct_owners = tuple(
        ht.adaptor.address
        for ht in candidate_targets
        if LegacyAddressMapper.any_is_declaring_file(ht.adaptor.address, sources_set)
        or owns_any_source(ht)
    )

    # If the OwnersRequest does not require dependees, then we're done.
    if owners_request.include_dependees == 'none':
        return BuildFileAddresses(direct_owners)
    else:
        # Otherwise: find dependees.
        all_addresses = await Get(BuildFileAddresses, Specs((DescendantAddresses(''),)))
        all_hydrated_structs = await MultiGet(
            Get(HydratedStruct, Address, a.to_address()) for a in all_addresses
        )
        all_structs = [hs.value for hs in all_hydrated_structs]

        bfa = build_configuration.registered_aliases()
        graph = _DependentGraph.from_iterable(
            target_types_from_build_file_aliases(bfa), address_mapper, all_structs
        )
        if owners_request.include_dependees == 'direct':
            return BuildFileAddresses(tuple(graph.dependents_of_addresses(direct_owners)))
        else:
            assert owners_request.include_dependees == 'transitive'
            return BuildFileAddresses(
                tuple(graph.transitive_dependents_of_addresses(direct_owners))
            )

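# A minimal usage sketch of the rule above, assuming it is registered with the engine and
# awaited from inside another @rule. `OwnersRequest` carries the changed sources plus one of
# the three `include_dependees` modes the rule handles ('none', 'direct', or 'transitive');
# the source path here is hypothetical.
async def owners_of_changed_file() -> BuildFileAddresses:
    request = OwnersRequest(sources=('src/python/app/main.py',), include_dependees='transitive')
    return await Get(BuildFileAddresses, OwnersRequest, request)
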
def test_context_object_on_streaming_workunits(
    rule_runner: RuleRunner, run_tracker: RunTracker
) -> None:
    scheduler = rule_runner.scheduler

    def callback(**kwargs) -> None:
        context = kwargs["context"]
        assert isinstance(context, StreamingWorkunitContext)

        completed_workunits = kwargs["completed_workunits"]
        for workunit in completed_workunits:
            if "artifacts" in workunit and "stdout_digest" in workunit["artifacts"]:
                digest = workunit["artifacts"]["stdout_digest"]
                output = context.single_file_digests_to_bytes([digest])
                assert output == (b"stdout output\n",)

    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[callback],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
    )

    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"), description="Stdout process"
    )

    with handler.session():
        rule_runner.request(ProcessResult, [stdout_process])

def _internal_scan_specs(self, specs, fail_fast=True, missing_is_fatal=True):
    # TODO: This should really use `product_request`, but on the other hand, we need to
    # deprecate the entire `AddressMapper` interface anyway. See #4769.
    request = self._scheduler.execution_request([BuildFileAddresses], [Specs(tuple(specs))])
    returns, throws = self._scheduler.execute(request)

    if throws:
        _, state = throws[0]
        if isinstance(state.exc, (AddressLookupError, ResolveError)):
            if missing_is_fatal:
                raise self.BuildFileScanError(
                    'Spec `{}` does not match any targets.\n{}'.format(
                        self._specs_string(specs), str(state.exc)))
            else:
                # NB: ignore Throws containing ResolveErrors because they are due to missing
                # targets / files
                return set()
        else:
            raise self.BuildFileScanError(str(state.exc))

    _, state = returns[0]
    if missing_is_fatal and not state.value.dependencies:
        raise self.BuildFileScanError(
            'Spec `{}` does not match any targets.'.format(self._specs_string(specs)))

    return set(state.value.dependencies)

def inject_addresses_closure(self, addresses):
    addresses = set(addresses) - set(self._target_by_address.keys())
    if not addresses:
        return
    dependencies = tuple(SingleAddress(a.spec_path, a.target_name) for a in addresses)
    for _ in self._inject_specs(Specs(dependencies=tuple(dependencies))):
        pass

def parse_specs(
    cls,
    raw_specs: Iterable[str],
    build_root: Optional[str] = None,
    exclude_patterns: Optional[Iterable[str]] = None,
    tags: Optional[Iterable[str]] = None,
) -> Specs:
    """Parse raw string specs into a Specs object."""
    build_root = build_root or get_buildroot()
    spec_parser = CmdLineSpecParser(build_root)

    address_specs: OrderedSet[AddressSpec] = OrderedSet()
    filesystem_specs: OrderedSet[FilesystemSpec] = OrderedSet()
    for spec_str in raw_specs:
        parsed_spec = spec_parser.parse_spec(spec_str)
        if isinstance(parsed_spec, AddressSpec):
            address_specs.add(parsed_spec)
        else:
            filesystem_specs.add(parsed_spec)

    address_specs_collection = AddressSpecs(
        dependencies=address_specs,
        exclude_patterns=exclude_patterns if exclude_patterns else tuple(),
        tags=tags,
    )
    filesystem_specs_collection = FilesystemSpecs(filesystem_specs)
    return Specs(
        address_specs=address_specs_collection,
        filesystem_specs=filesystem_specs_collection,
    )

def find_valid_field_sets(
    superclass: Type,
    specs: Iterable[Spec],
    *,
    no_applicable_behavior: NoApplicableTargetsBehavior = NoApplicableTargetsBehavior.ignore,
    expect_single_config: bool = False,
) -> TargetRootsToFieldSets:
    request = TargetRootsToFieldSetsRequest(
        superclass,
        goal_description="fake",
        no_applicable_targets_behavior=no_applicable_behavior,
        expect_single_field_set=expect_single_config,
    )
    return rule_runner.request(
        TargetRootsToFieldSets,
        [
            request,
            Specs(
                includes=RawSpecs.create(
                    specs,
                    convert_dir_literal_to_address_literal=True,
                    description_of_origin="tests",
                ),
                ignores=RawSpecs(description_of_origin="tests"),
            ),
        ],
    )

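# A hypothetical call sketch for the helper above: `PackageFieldSet` stands in for any
# field-set superclass, and the address literal is made up. `expect_single_config=True`
# asks the engine to error unless exactly one field set matches.
tr2fs = find_valid_field_sets(
    PackageFieldSet,
    [AddressLiteralSpec(path_component="src/app", target_component="bin")],
    expect_single_config=True,
)
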
def parse_specs(cls, target_specs, build_root=None, exclude_patterns=None, tags=None):
    """Parse string specs into unique `Spec` objects.

    :param iterable target_specs: An iterable of string specs.
    :param string build_root: The path to the build root.
    :returns: A list containing a single `Specs` object, or None if no specs were given.
    """
    build_root = build_root or get_buildroot()
    spec_parser = CmdLineSpecParser(build_root)

    dependencies = tuple(
        OrderedSet(spec_parser.parse_spec(spec_str) for spec_str in target_specs)
    )
    if not dependencies:
        return None
    return [
        Specs(
            dependencies=dependencies,
            exclude_patterns=exclude_patterns if exclude_patterns else tuple(),
            tags=tags,
        )
    ]

def _init_engine(self, local_store_dir: Optional[str] = None) -> None:
    if self._scheduler is not None:
        return

    options_bootstrapper = OptionsBootstrapper.create(args=["--pants-config-files=[]"])
    local_store_dir = (
        local_store_dir
        or options_bootstrapper.bootstrap_options.for_global_scope().local_store_dir
    )

    # NB: This uses the long form of initialization because it needs to directly specify
    # `cls.alias_groups` rather than having them be provided by bootstrap options.
    graph_session = EngineInitializer.setup_legacy_graph_extended(
        pants_ignore_patterns=[],
        local_store_dir=local_store_dir,
        build_file_imports_behavior=BuildFileImportsBehavior.error,
        native=init_native(),
        options_bootstrapper=options_bootstrapper,
        build_root=self.build_root,
        build_configuration=self.build_config(),
        build_ignore_patterns=None,
    ).new_session(zipkin_trace_v2=False, build_id="buildid_for_test")
    self._scheduler = graph_session.scheduler_session
    self._build_graph, self._address_mapper = graph_session.create_build_graph(
        Specs(address_specs=AddressSpecs([]), filesystem_specs=FilesystemSpecs([])),
        self._build_root(),
    )

def test_fails_on_nonexistent_specs(self):
    """Test that specs referring to nonexistent targets raise a ResolveError."""
    address_family = AddressFamily('root', {'a': ('root/BUILD', TargetAdaptor())})
    specs = Specs([SingleAddress('root', 'b'), SingleAddress('root', 'a')])

    expected_rx_str = re.escape(
        """"b" was not found in namespace "root". Did you mean one of:
  :a""")
    with self.assertRaisesRegexp(ResolveError, expected_rx_str):
        self._resolve_build_file_addresses(
            specs, address_family, self._snapshot(), self._address_mapper())

    # Ensure that we still catch nonexistent targets later on in the list of command-line specs.
    specs = Specs([SingleAddress('root', 'a'), SingleAddress('root', 'b')])
    with self.assertRaisesRegexp(ResolveError, expected_rx_str):
        self._resolve_build_file_addresses(
            specs, address_family, self._snapshot(), self._address_mapper())

def create(
    cls,
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
    build_root: Optional[str] = None,
) -> Specs:
    specs = cls.parse_specs(raw_specs=options.specs, build_root=build_root)
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if specs.provided and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        if specs.filesystem_specs and specs.address_specs:
            specs_description = "target and file arguments"
        elif specs.filesystem_specs:
            specs_description = "file arguments"
        else:
            specs_description = "target arguments"
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. Please "
            "use only one."
        )

    if not changed_options.provided:
        return specs

    scm = get_scm()
    if not scm:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are not available without a recognized SCM (usually "
            "Git)."
        )
    changed_request = ChangedRequest(
        sources=tuple(changed_options.changed_files(scm=scm)),
        dependees=changed_options.dependees,
    )
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)]
    )
    logger.debug("changed addresses: %s", changed_addresses)

    address_specs = []
    filesystem_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        if not address.is_base_target:
            # TODO: Should adjust Specs parsing to support parsing the disambiguated file
            # Address, which would bypass rediscovering owners.
            filesystem_specs.append(FilesystemLiteralSpec(address.filename))
        else:
            address_specs.append(SingleAddress(address.spec_path, address.target_name))

    return Specs(
        AddressSpecs(address_specs, filter_by_global_options=True),
        FilesystemSpecs(filesystem_specs),
    )

def __init__(
    self,
    root_dir: str,
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    build_config: BuildConfiguration,
    run_tracker: RunTracker,
    reporting: Reporting,
    graph_session: LegacyGraphSession,
    specs: Specs,
    exiter=sys.exit,
) -> None:
    """
    :param root_dir: The root directory of the pants workspace (aka the "build root").
    :param options: The global, pre-initialized Options instance.
    :param build_config: A pre-initialized BuildConfiguration instance.
    :param run_tracker: The global, pre-initialized/running RunTracker instance.
    :param reporting: The global, pre-initialized Reporting instance.
    :param graph_session: The graph session for this run.
    :param specs: The specs for this run, i.e. either the address or filesystem specs.
    :param func exiter: A function that accepts an exit code value and exits. (for tests, Optional)
    """
    self._root_dir = root_dir
    self._options_bootstrapper = options_bootstrapper
    self._options = options
    self._build_config = build_config
    self._run_tracker = run_tracker
    self._reporting = reporting
    self._graph_session = graph_session
    self._specs = specs
    self._exiter = exiter

    self._global_options = options.for_global_scope()
    self._fail_fast = self._global_options.fail_fast
    self._explain = self._global_options.explain
    self._kill_nailguns = self._global_options.kill_nailguns

    # V1 tasks do not understand FilesystemSpecs, so we eagerly convert them into AddressSpecs.
    if self._specs.filesystem_specs.dependencies:
        (owned_addresses,) = self._graph_session.scheduler_session.product_request(
            Addresses,
            [Params(self._specs.filesystem_specs, self._options_bootstrapper)],
        )
        updated_address_specs = AddressSpecs(
            dependencies=tuple(
                SingleAddress(a.spec_path, a.target_name) for a in owned_addresses
            ),
            tags=self._specs.address_specs.matcher.tags,
            exclude_patterns=self._specs.address_specs.matcher.exclude_patterns,
        )
        self._specs = Specs(
            address_specs=updated_address_specs,
            filesystem_specs=FilesystemSpecs([]),
        )

def test_more_complicated_engine_aware(rule_runner: RuleRunner, run_tracker: RunTracker) -> None:
    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        rule_runner.scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker.add],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.TRACE,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
    )
    with handler.session():
        input_1 = CreateDigest((
            FileContent(path="a.txt", content=b"alpha"),
            FileContent(path="b.txt", content=b"beta"),
        ))
        digest_1 = rule_runner.request(Digest, [input_1])
        snapshot_1 = rule_runner.request(Snapshot, [digest_1])

        input_2 = CreateDigest((FileContent(path="g.txt", content=b"gamma"),))
        digest_2 = rule_runner.request(Digest, [input_2])
        snapshot_2 = rule_runner.request(Snapshot, [digest_2])

        input = ComplicatedInput(snapshot_1=snapshot_1, snapshot_2=snapshot_2)
        rule_runner.request(Output, [input])

    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    workunit = next(
        item for item in finished
        if item["name"] == "pants.engine.internals.engine_test.a_rule"
    )

    streaming_workunit_context = handler._context

    artifacts = workunit["artifacts"]
    output_snapshot_1 = artifacts["snapshot_1"]
    output_snapshot_2 = artifacts["snapshot_2"]

    output_contents_list = streaming_workunit_context.snapshots_to_file_contents(
        [output_snapshot_1, output_snapshot_2]
    )
    assert len(output_contents_list) == 2
    assert isinstance(output_contents_list[0], DigestContents)
    assert isinstance(output_contents_list[1], DigestContents)

    digest_contents_1 = output_contents_list[0]
    digest_contents_2 = output_contents_list[1]

    assert len(tuple(x for x in digest_contents_1 if x.content == b"alpha")) == 1
    assert len(tuple(x for x in digest_contents_1 if x.content == b"beta")) == 1
    assert len(tuple(x for x in digest_contents_2 if x.content == b"gamma")) == 1

def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
    *,
    build_root: Optional[str] = None,
) -> Specs:
    """Determine the specs for a given Pants run."""
    build_root = build_root or get_buildroot()
    specs = SpecsParser(build_root).parse_specs(options.specs)
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if specs.provided and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        if specs.filesystem_specs and specs.address_specs:
            specs_description = "target and file arguments"
        elif specs.filesystem_specs:
            specs_description = "file arguments"
        else:
            specs_description = "target arguments"
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. Please "
            "use only one."
        )

    if not changed_options.provided:
        return specs

    git = get_git()
    if not git:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are only available if Git is used for the repository."
        )
    changed_request = ChangedRequest(
        sources=tuple(changed_options.changed_files(git)),
        dependees=changed_options.dependees,
    )
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)]
    )
    logger.debug("changed addresses: %s", changed_addresses)

    address_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(address.spec)
        address_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                # NB: AddressInput.target_component may be None, but AddressLiteralSpec expects a
                # string.
                target_component=address_input.target_component or address.target_name,
            )
        )
    return Specs(AddressSpecs(address_specs, filter_by_global_options=True), FilesystemSpecs([]))

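# A hedged sketch of the parsing used above: `AddressInput.parse` splits an address spec
# string into its path and target components. The example spec is hypothetical, and
# `target_component` may be None when no `:name` suffix is given.
address_input = AddressInput.parse("src/python/foo:bar")
assert address_input.path_component == "src/python/foo"
assert address_input.target_component == "bar"
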
def create(
    cls,
    options: Options,
    session: SchedulerSession,
    build_root: Optional[str] = None,
    exclude_patterns: Optional[Iterable[str]] = None,
    tags: Optional[Iterable[str]] = None,
) -> Specs:
    specs = cls.parse_specs(
        raw_specs=options.specs,
        build_root=build_root,
        exclude_patterns=exclude_patterns,
        tags=tags,
    )
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if changed_options.is_actionable() and specs.provided_specs.dependencies:
        # We've been provided both a change request and specs.
        raise InvalidSpecConstraint(
            "Multiple target selection methods provided. Please use only one of "
            "`--changed-*`, address specs, or filesystem specs."
        )

    if changed_options.is_actionable():
        scm = get_scm()
        if not scm:
            raise InvalidSpecConstraint(
                "The `--changed-*` options are not available without a recognized SCM (usually Git)."
            )
        changed_request = ChangedRequest(
            sources=tuple(changed_options.changed_files(scm=scm)),
            include_dependees=changed_options.include_dependees,
        )
        (changed_addresses,) = session.product_request(ChangedAddresses, [changed_request])
        logger.debug("changed addresses: %s", changed_addresses.addresses)
        dependencies = tuple(
            SingleAddress(a.spec_path, a.target_name) for a in changed_addresses.addresses
        )
        return Specs(
            address_specs=AddressSpecs(
                dependencies=dependencies,
                exclude_patterns=exclude_patterns,
                tags=tags,
            ),
            filesystem_specs=FilesystemSpecs([]),
        )

    return specs

def warm_product_graph(self, target_roots):
    """Warm the scheduler's `ProductGraph` with `TransitiveHydratedTargets` products.

    :param TargetRoots target_roots: The targets root of the request.
    """
    logger.debug('warming target_roots for: %r', target_roots)
    subjects = [Specs(tuple(target_roots.specs))]
    request = self.scheduler.execution_request([TransitiveHydratedTargets], subjects)
    result = self.scheduler.execute(request)
    if result.error:
        raise result.error

def test_duplicated(self):
    """Test that matching the same Spec twice succeeds."""
    address = SingleAddress('a', 'a')
    snapshot = Snapshot(Digest('xx', 2), ('a/BUILD',), ())
    address_family = AddressFamily('a', {'a': ('a/BUILD', 'this is an object!')})
    specs = Specs([address, address])

    bfas = self._resolve_build_file_addresses(
        specs, address_family, snapshot, self._address_mapper())
    self.assertEqual(len(bfas.dependencies), 1)
    self.assertEqual(bfas.dependencies[0].spec, 'a:a')

def __init__(
    self,
    targets: Iterable[Target],
    specs: Specs,
    union_membership: UnionMembership,
    *,
    applicable_target_types: Iterable[type[Target]],
    goal_description: str,
) -> None:
    applicable_target_aliases = sorted(
        {target_type.alias for target_type in applicable_target_types}
    )
    inapplicable_target_aliases = sorted({tgt.alias for tgt in targets})
    msg = (
        "No applicable files or targets matched."
        if inapplicable_target_aliases
        else "No files or targets specified."
    )
    msg += (
        f" {goal_description.capitalize()} works "
        f"with these target types:\n\n"
        f"{bullet_list(applicable_target_aliases)}\n\n"
    )

    # Explain what was specified, if relevant.
    if inapplicable_target_aliases:
        specs_description = specs.arguments_provided_description() or ""
        if specs_description:
            specs_description = f" {specs_description} with"
        msg += (
            f"However, you only specified{specs_description} these target types:\n\n"
            f"{bullet_list(inapplicable_target_aliases)}\n\n"
        )

    # Add a remedy.
    #
    # We sometimes suggest using `./pants filedeps` to find applicable files. However, this
    # command only works if at least one of the targets has a SourcesField field.
    #
    # NB: Even with the "secondary owners" mechanism - used by target types like `pex_binary`
    # and `python_awslambda` to still work with file args - those targets will not show the
    # associated files when using filedeps.
    filedeps_goal_works = any(
        tgt.class_has_field(SourcesField, union_membership) for tgt in applicable_target_types
    )
    pants_filter_command = (
        f"{bin_name()} --filter-target-type={','.join(applicable_target_aliases)}"
    )
    remedy = (
        f"Please specify relevant file and/or target arguments. Run `{pants_filter_command} "
        f"list ::` to find all applicable targets in your project"
    )
    if filedeps_goal_works:
        remedy += f", or run `{pants_filter_command} filedeps ::` to find all applicable files."
    else:
        remedy += "."
    msg += remedy
    super().__init__(msg)

def test_exclude_pattern_with_single_address(self):
    """Test that single address targets are filtered based on exclude patterns."""
    specs = Specs([SingleAddress('root', 'not_me')], exclude_patterns=tuple(['root.*']))
    address_family = AddressFamily('root', {
        'not_me': ('root/BUILD', TargetAdaptor()),
    })

    targets = self._resolve_build_file_addresses(
        specs, address_family, self._snapshot(), self._address_mapper())
    self.assertEqual(len(targets.dependencies), 0)

def resolve_address_specs(
    self,
    specs: Iterable[AddressSpec],
    bootstrapper: Optional[OptionsBootstrapper] = None,
) -> Set[AddressWithOrigin]:
    result = self.request_single_product(
        AddressesWithOrigins,
        Params(
            Specs(AddressSpecs(specs, filter_by_global_options=True), FilesystemSpecs([])),
            bootstrapper or create_options_bootstrapper(),
        ),
    )
    return set(result)

def parse_specs(self, specs: Iterable[str]) -> Specs:
    address_specs: OrderedSet[AddressSpec] = OrderedSet()
    filesystem_specs: OrderedSet[FilesystemSpec] = OrderedSet()
    for spec_str in specs:
        parsed_spec = self.parse_spec(spec_str)
        if isinstance(parsed_spec, AddressSpec):
            address_specs.add(parsed_spec)
        else:
            filesystem_specs.add(parsed_spec)

    return Specs(
        AddressSpecs(address_specs, filter_by_global_options=True),
        FilesystemSpecs(filesystem_specs),
    )

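# A minimal usage sketch (the spec strings are hypothetical): address specs such as
# `src/python/foo:bar` and filesystem specs such as `src/python/foo/util.py` end up in the
# two separate collections carried by `Specs`, mirroring the branching above.
parser = SpecsParser(get_buildroot())
specs = parser.parse_specs(["src/python/foo:bar", "src/python/foo/util.py"])
assert specs.address_specs and specs.filesystem_specs
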
def test_duplicated(self):
    """Test that matching the same Spec twice succeeds."""
    address = SingleAddress('a', 'a')
    address_mapper = AddressMapper(JsonParser(TestTable()))
    snapshot = Snapshot(DirectoryDigest(str('xx'), 2), (Path('a/BUILD', File('a/BUILD')),))
    address_family = AddressFamily('a', {'a': ('a/BUILD', 'this is an object!')})

    bfas = run_rule(
        addresses_from_address_families, address_mapper, Specs([address, address]), {
            (Snapshot, PathGlobs): lambda _: snapshot,
            (AddressFamily, Dir): lambda _: address_family,
        })
    self.assertEquals(len(bfas.dependencies), 1)
    self.assertEquals(bfas.dependencies[0].spec, 'a:a')

def test_exclude_pattern(self):
    """Test that targets are filtered based on exclude patterns."""
    specs = Specs([SiblingAddresses('root')], exclude_patterns=tuple(['.exclude*']))
    address_family = AddressFamily('root', {
        'exclude_me': ('root/BUILD', TargetAdaptor()),
        'not_me': ('root/BUILD', TargetAdaptor()),
    })

    targets = self._resolve_build_file_addresses(
        specs, address_family, self._snapshot(), self._address_mapper())
    self.assertEqual(len(targets.dependencies), 1)
    self.assertEqual(targets.dependencies[0].spec, 'root:not_me')

def _inject_specs(self, subjects):
    """Injects targets into the graph for each of the given `Spec` objects.

    Yields the resulting addresses.
    """
    logger.debug('Injecting specs to %s: %s', self, subjects)
    with self._resolve_context():
        specs = tuple(subjects)
        thts, = self._scheduler.product_request(TransitiveHydratedTargets, [Specs(specs)])

    self._index(thts.closure)

    for hydrated_target in thts.roots:
        yield hydrated_target.address

def test_tag_filter(self):
    """Test that targets are filtered based on `tags`."""
    specs = Specs([SiblingAddresses('root')], tags=['+integration'])
    address_family = AddressFamily('root', {
        'a': ('root/BUILD', TargetAdaptor()),
        'b': ('root/BUILD', TargetAdaptor(tags={'integration'})),
        'c': ('root/BUILD', TargetAdaptor(tags={'not_integration'})),
    })

    targets = self._resolve_build_file_addresses(
        specs, address_family, self._snapshot(), self._address_mapper())
    self.assertEqual(len(targets.dependencies), 1)
    self.assertEqual(targets.dependencies[0].spec, 'root:b')

def assert_poetry_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    pyproject_toml: str,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    pyproject_toml_relpath: str = "pyproject.toml",
) -> None:
    rule_runner.write_files({"BUILD": build_file_entry, pyproject_toml_relpath: pyproject_toml})
    targets = rule_runner.request(
        Targets,
        [Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))],
    )
    assert {expected_file_dep, *expected_targets} == set(targets)

def assert_python_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    requirements_txt: str,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    requirements_txt_relpath: str = "requirements.txt",
) -> None:
    rule_runner.write_files({"BUILD": build_file_entry, requirements_txt_relpath: requirements_txt})
    targets = rule_runner.request(
        Targets,
        [Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))],
    )
    assert {expected_file_dep, *expected_targets} == set(targets)

def _fixture_for_rules(
    self, rules, max_workunit_verbosity: LogLevel = LogLevel.INFO
) -> Tuple[SchedulerSession, WorkunitTracker, StreamingWorkunitHandler]:
    scheduler = self.mk_scheduler(rules, include_trace_on_error=False)
    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=new_run_tracker(),
        callbacks=[tracker],
        report_interval_seconds=0.01,
        max_workunit_verbosity=max_workunit_verbosity,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
        pantsd=False,
    )
    return scheduler, tracker, handler

def test_exclude_pattern_with_single_address(self):
    """Test that single address targets are filtered based on exclude patterns."""
    spec = SingleAddress('root', 'not_me')
    address_mapper = AddressMapper(JsonParser(TestTable()))
    snapshot = Snapshot(DirectoryDigest('xx', 2), (Path('root/BUILD', File('root/BUILD')),))
    address_family = AddressFamily('root', {
        'not_me': ('root/BUILD', TargetAdaptor()),
    })

    targets = run_rule(
        addresses_from_address_families, address_mapper,
        Specs([spec], exclude_patterns=tuple(['root.*'])), {
            (Snapshot, PathGlobs): lambda _: snapshot,
            (AddressFamily, Dir): lambda _: address_family,
        })
    self.assertEqual(len(targets.dependencies), 0)

def assert_pipenv_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    pipfile_lock: dict,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    pipfile_lock_relpath: str = "Pipfile.lock",
) -> None:
    rule_runner.add_to_build_file("", f"{build_file_entry}\n")
    rule_runner.create_file(pipfile_lock_relpath, dumps(pipfile_lock))
    targets = rule_runner.request(
        Targets,
        [Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))],
    )
    assert {expected_file_dep, *expected_targets} == set(targets)

def test_counters(rule_runner: RuleRunner, run_tracker: RunTracker) -> None:
    scheduler = rule_runner.scheduler
    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.TRACE,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
        allow_async_completion=False,
    )

    with handler:
        scheduler.record_test_observation(128)
        rule_runner.request(
            ProcessResult,
            [
                Process(
                    ["/bin/sh", "-c", "true"],
                    description="always true",
                    cache_scope=ProcessCacheScope.PER_SESSION,
                )
            ],
        )
        histograms_info = scheduler.get_observation_histograms()

    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    workunits_with_counters = [item for item in finished if "counters" in item]
    assert workunits_with_counters[0]["counters"]["local_cache_requests"] == 1
    assert workunits_with_counters[0]["counters"]["local_cache_requests_uncached"] == 1
    assert workunits_with_counters[1]["counters"]["local_execution_requests"] == 1

    assert histograms_info["version"] == 0
    assert "histograms" in histograms_info
    assert "test_observation" in histograms_info["histograms"]
    assert (
        histograms_info["histograms"]["test_observation"]
        == b"\x1c\x84\x93\x14\x00\x00\x00\x1fx\x9c\x93i\x99,\xcc\xc0\xc0\xc0\xcc\x00\x010\x9a\x11J3\xd9\x7f\x800\xfe32\x01\x00E\x0c\x03\x81"
    )