def __init__(self, *args, **kwargs):
  super(Filter, self).__init__(*args, **kwargs)
  self._filters = []

  def _get_targets(spec_str):
    spec_parser = CmdLineSpecParser(get_buildroot())
    try:
      spec = spec_parser.parse_spec(spec_str)
      addresses = self.context.address_mapper.scan_specs([spec])
    except AddressLookupError as e:
      raise TaskError(
        "Failed to parse address selector: {spec_str}\n {message}".format(spec_str=spec_str, message=e)
      )
    # filter specs may not have been parsed as part of the context: force parsing
    matches = set()
    for address in addresses:
      self.context.build_graph.inject_address_closure(address)
      matches.add(self.context.build_graph.get_target(address))
    if not matches:
      raise TaskError("No matches for address selector: {spec_str}".format(spec_str=spec_str))
    return matches

  def filter_for_address(spec):
    matches = _get_targets(spec)
    return lambda target: target in matches
  self._filters.extend(create_filters(self.get_options().target, filter_for_address))

  def filter_for_type(name):
    target_types = self.target_types_for_alias(name)
    return lambda target: isinstance(target, tuple(target_types))
  self._filters.extend(create_filters(self.get_options().type, filter_for_type))

  def filter_for_ancestor(spec):
    ancestors = _get_targets(spec)
    children = set()
    for ancestor in ancestors:
      ancestor.walk(children.add)
    return lambda target: target in children
  self._filters.extend(create_filters(self.get_options().ancestor, filter_for_ancestor))

  def filter_for_regex(regex):
    try:
      parser = re.compile(regex)
    except re.error as e:
      raise TaskError("Invalid regular expression: {}: {}".format(regex, e))
    return lambda target: parser.search(str(target.address.spec))
  self._filters.extend(create_filters(self.get_options().regex, filter_for_regex))

  def filter_for_tag_regex(tag_regex):
    try:
      regex = re.compile(tag_regex)
    except re.error as e:
      raise TaskError("Invalid regular expression: {}: {}".format(tag_regex, e))
    return lambda target: any(map(regex.search, map(str, target.tags)))
  self._filters.extend(create_filters(self.get_options().tag_regex, filter_for_tag_regex))

def _expand_goals_and_specs(self):
  goals = self.options.goals
  specs = self.options.target_specs
  fail_fast = self.options.for_global_scope().fail_fast

  for goal in goals:
    if self.address_mapper.from_cache(get_buildroot(), goal, must_exist=False).file_exists():
      logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                     "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

  if self.options.print_help_if_requested():
    sys.exit(0)

  self.requested_goals = goals

  with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
    spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                    spec_excludes=self.spec_excludes,
                                    exclude_target_regexps=self.global_options.exclude_target_regexp)
    with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
      def filter_for_tag(tag):
        return lambda target: tag in map(str, target.tags)
      tag_filter = wrap_filters(create_filters(self.global_options.tag, filter_for_tag))
      for spec in specs:
        for address in spec_parser.parse_addresses(spec, fail_fast):
          self.build_graph.inject_address_closure(address)
          tgt = self.build_graph.get_target(address)
          if tag_filter(tgt):
            self.targets.append(tgt)
  self.goals = [Goal.by_name(goal) for goal in goals]

def test_wrap_filters(self):
  divides_by_6 = wrap_filters(create_filters(['2', '3'], self._divides_by))
  self.assertFalse(divides_by_6(2))
  self.assertFalse(divides_by_6(3))
  self.assertTrue(divides_by_6(6))
  self.assertFalse(divides_by_6(9))
  self.assertTrue(divides_by_6(12))

def test_create_filters(self):
  # This tests that create_filters() properly captures different closures.
  divides_by_2, divides_by_3 = create_filters(['2', '3'], self._divides_by)
  self.assertTrue(divides_by_2(2))
  self.assertFalse(divides_by_2(3))
  self.assertTrue(divides_by_2(4))
  self.assertTrue(divides_by_2(6))

  self.assertFalse(divides_by_3(2))
  self.assertTrue(divides_by_3(3))
  self.assertFalse(divides_by_3(4))
  self.assertTrue(divides_by_3(6))

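# A minimal, self-contained sketch of the semantics the two tests above exercise.
# It only assumes that create_filters() maps each parameter string through a
# predicate factory (capturing one closure per parameter) and that wrap_filters()
# ANDs the resulting predicates. _divides_by below is a hypothetical stand-in for
# the test fixture's factory, not the project's actual implementation.


def create_filters(params, predicate_factory):
  # One predicate per parameter, each closing over its own parameter value.
  return [predicate_factory(param) for param in params]


def wrap_filters(filters):
  # A single predicate that passes only if every wrapped filter passes.
  return lambda item: all(f(item) for f in filters)


def _divides_by(param):
  n = int(param)
  return lambda value: value % n == 0


# Mirrors test_wrap_filters: divisible by both 2 and 3, i.e. by 6.
divides_by_6 = wrap_filters(create_filters(['2', '3'], _divides_by))
assert divides_by_6(12) and not divides_by_6(9)
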
def _expand_specs(self, specs, fail_fast):
  """Populate the BuildGraph and target list from a set of input specs."""
  with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
    def filter_for_tag(tag):
      return lambda target: tag in map(str, target.tags)
    tag_filter = wrap_filters(create_filters(self._tag, filter_for_tag))
    for spec in specs:
      for address in self._spec_parser.parse_addresses(spec, fail_fast):
        self._build_graph.inject_address_closure(address)
        target = self._build_graph.get_target(address)
        if tag_filter(target):
          self._targets.append(target)

def _roots_to_targets(self, target_roots):
  """Populate the BuildGraph and target list from a set of input TargetRoots."""
  with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
    def filter_for_tag(tag):
      return lambda target: tag in map(str, target.tags)
    tag_filter = wrap_filters(create_filters(self._tag, filter_for_tag))

    def generate_targets():
      for address in self._build_graph.inject_roots_closure(target_roots, self._fail_fast):
        target = self._build_graph.get_target(address)
        if tag_filter(target):
          yield target

    return list(generate_targets())

def _specs_to_targets(self, specs):
  """Populate the BuildGraph and target list from a set of input specs."""
  with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
    def filter_for_tag(tag):
      return lambda target: tag in map(str, target.tags)
    tag_filter = wrap_filters(create_filters(self._tag, filter_for_tag))

    def generate_targets(specs):
      for address in self._build_graph.inject_specs_closure(specs, self._fail_fast):
        target = self._build_graph.get_target(address)
        if tag_filter(target):
          yield target

    return list(generate_targets(specs))

def target_type_filters(
    self, registered_target_types: RegisteredTargetTypes
) -> list[TargetFilter]:
    def outer_filter(target_alias: str) -> TargetFilter:
        if target_alias not in registered_target_types.aliases:
            raise UnrecognizedTargetTypeException(target_alias, registered_target_types)
        target_type = registered_target_types.aliases_to_types[target_alias]
        if target_type.deprecated_alias and target_alias == target_type.deprecated_alias:
            warn_deprecated_target_type(target_type)

        def inner_filter(tgt: Target) -> bool:
            return tgt.alias == target_alias or bool(
                tgt.deprecated_alias and tgt.deprecated_alias == target_alias
            )

        return inner_filter

    return create_filters(self.target_type, outer_filter)

def _expand_specs(self, spec_strs, fail_fast):
  """Populate the BuildGraph and target list from a set of input specs."""
  with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
    def filter_for_tag(tag):
      return lambda target: tag in map(str, target.tags)
    tag_filter = wrap_filters(create_filters(self._tag, filter_for_tag))

    # Parse all specs into unique Spec objects.
    spec_parser = CmdLineSpecParser(self._root_dir)
    specs = OrderedSet()
    for spec_str in spec_strs:
      specs.add(spec_parser.parse_spec(spec_str))

    # Then scan them to generate unique Addresses.
    for address in self._build_graph.inject_specs_closure(specs, fail_fast):
      target = self._build_graph.get_target(address)
      if tag_filter(target):
        self._targets.append(target)

def _expand_goals_and_specs(self):
  goals = self.options.goals
  specs = self.options.target_specs
  fail_fast = self.options.for_global_scope().fail_fast

  for goal in goals:
    if self.address_mapper.from_cache(get_buildroot(), goal, must_exist=False).file_exists():
      logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                     "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

  if self.options.print_help_if_requested():
    self._exiter(0)

  self.requested_goals = goals

  with self.run_tracker.new_workunit(name='setup', labels=[WorkUnitLabel.SETUP]):
    spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                    spec_excludes=self.spec_excludes,
                                    exclude_target_regexps=self.global_options.exclude_target_regexp)
    with self.run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
      def filter_for_tag(tag):
        return lambda target: tag in map(str, target.tags)
      tag_filter = wrap_filters(create_filters(self.global_options.tag, filter_for_tag))
      for spec in specs:
        for address in spec_parser.parse_addresses(spec, fail_fast):
          self.build_graph.inject_address_closure(address)
          tgt = self.build_graph.get_target(address)
          if tag_filter(tgt):
            self.targets.append(tgt)
  self.goals = [Goal.by_name(goal) for goal in goals]

def _expand_specs(self, spec_strs, fail_fast):
  """Populate the BuildGraph and target list from a set of input specs."""
  with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]):
    def filter_for_tag(tag):
      return lambda target: tag in map(str, target.tags)
    tag_filter = wrap_filters(create_filters(self._tag, filter_for_tag))

    # Parse all specs into unique Spec objects.
    spec_parser = CmdLineSpecParser(self._root_dir)
    specs = OrderedSet()
    for spec_str in spec_strs:
      specs.add(spec_parser.parse_spec(spec_str))

    # Then scan them to generate unique Addresses.
    for address in self._address_mapper.scan_specs(specs, fail_fast, self._spec_excludes):
      self._build_graph.inject_address_closure(address)
      target = self._build_graph.get_target(address)
      if tag_filter(target):
        self._targets.append(target)

def _target_tag_matches(self):
  def filter_for_tag(tag):
    return lambda t: tag in [str(t_tag) for t_tag in t.kwargs().get("tags", [])]
  return wrap_filters(create_filters(self.tags, filter_for_tag))

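# A hedged illustration of the AND semantics of the matcher built above:
# wrap_filters() only admits an object carrying every requested tag. _FakeAdaptor
# is a hypothetical stand-in for the kwargs()-style objects being filtered, and
# the snippet assumes create_filters/wrap_filters (as exercised by the tests
# earlier) are in scope.


class _FakeAdaptor(object):
  def __init__(self, **kwargs):
    self._kwargs = kwargs

  def kwargs(self):
    return self._kwargs


def _tag_matcher(tags):
  def filter_for_tag(tag):
    return lambda t: tag in [str(t_tag) for t_tag in t.kwargs().get("tags", [])]
  return wrap_filters(create_filters(tags, filter_for_tag))


matcher = _tag_matcher(["integration", "slow"])
assert matcher(_FakeAdaptor(tags=["integration", "slow", "jvm"]))
assert not matcher(_FakeAdaptor(tags=["integration"]))  # missing "slow", so rejected
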
def __init__(self, *args, **kwargs):
  super(Filter, self).__init__(*args, **kwargs)
  self._filters = []

  def _get_targets(spec):
    try:
      spec_parser = CmdLineSpecParser(get_buildroot(), self.context.address_mapper)
      addresses = spec_parser.parse_addresses(spec)
    except AddressLookupError as e:
      raise TaskError('Failed to parse address selector: {spec}\n {message}'.format(spec=spec, message=e))
    # filter specs may not have been parsed as part of the context: force parsing
    matches = set()
    for address in addresses:
      self.context.build_graph.inject_address_closure(address)
      matches.add(self.context.build_graph.get_target(address))
    if not matches:
      raise TaskError('No matches for address selector: {spec}'.format(spec=spec))
    return matches

  def filter_for_address(spec):
    matches = _get_targets(spec)
    return lambda target: target in matches
  self._filters.extend(create_filters(self.get_options().target, filter_for_address))

  def filter_for_type(name):
    target_types = self.target_types_for_alias(name)
    return lambda target: isinstance(target, tuple(target_types))
  self._filters.extend(create_filters(self.get_options().type, filter_for_type))

  def filter_for_ancestor(spec):
    ancestors = _get_targets(spec)
    children = set()
    for ancestor in ancestors:
      ancestor.walk(children.add)
    return lambda target: target in children
  self._filters.extend(create_filters(self.get_options().ancestor, filter_for_ancestor))

  def filter_for_regex(regex):
    try:
      parser = re.compile(regex)
    except re.error as e:
      raise TaskError("Invalid regular expression: {}: {}".format(regex, e))
    return lambda target: parser.search(str(target.address.spec))
  self._filters.extend(create_filters(self.get_options().regex, filter_for_regex))

  def filter_for_tag_regex(tag_regex):
    try:
      regex = re.compile(tag_regex)
    except re.error as e:
      raise TaskError("Invalid regular expression: {}: {}".format(tag_regex, e))
    return lambda target: any(map(regex.search, map(str, target.tags)))
  self._filters.extend(create_filters(self.get_options().tag_regex, filter_for_tag_regex))

  def filter_for_tag(tag):
    return lambda target: tag in map(str, target.tags)
  self._filters.extend(create_filters(self.get_options().tag, filter_for_tag))

def __init__(self, *args, **kwargs):
  super(Filter, self).__init__(*args, **kwargs)
  self._filters = []

  def _get_targets(spec):
    try:
      spec_parser = CmdLineSpecParser(get_buildroot(), self.context.address_mapper)
      addresses = spec_parser.parse_addresses(spec)
    except AddressLookupError as e:
      raise TaskError('Failed to parse address selector: {spec}\n {message}'.format(spec=spec, message=e))
    # filter specs may not have been parsed as part of the context: force parsing
    matches = set()
    for address in addresses:
      self.context.build_graph.inject_address_closure(address)
      matches.add(self.context.build_graph.get_target(address))
    if not matches:
      raise TaskError('No matches for address selector: {spec}'.format(spec=spec))
    return matches

  def filter_for_address(spec):
    matches = _get_targets(spec)
    return lambda target: target in matches
  self._filters.extend(create_filters(self.get_options().target, filter_for_address))

  def filter_for_type(name):
    # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
    # machinery
    try:
      # Try to do a fully qualified import 1st for filtering on custom types.
      from_list, module, type_name = name.rsplit('.', 2)
      module = __import__('{}.{}'.format(from_list, module), fromlist=[from_list])
      target_type = getattr(module, type_name)
    except (ImportError, ValueError):
      # Fall back on pants provided target types.
      registered_aliases = self.context.build_file_parser.registered_aliases()
      if name not in registered_aliases.targets:
        raise TaskError('Invalid type name: {}'.format(name))
      target_type = registered_aliases.targets[name]
    if not issubclass(target_type, Target):
      raise TaskError('Not a Target type: {}'.format(name))
    return lambda target: isinstance(target, target_type)
  self._filters.extend(create_filters(self.get_options().type, filter_for_type))

  def filter_for_ancestor(spec):
    ancestors = _get_targets(spec)
    children = set()
    for ancestor in ancestors:
      ancestor.walk(children.add)
    return lambda target: target in children
  self._filters.extend(create_filters(self.get_options().ancestor, filter_for_ancestor))

  def filter_for_regex(regex):
    try:
      parser = re.compile(regex)
    except re.error as e:
      raise TaskError("Invalid regular expression: {}: {}".format(regex, e))
    return lambda target: parser.search(str(target.address.spec))
  self._filters.extend(create_filters(self.get_options().regex, filter_for_regex))

  def filter_for_tag_regex(tag_regex):
    try:
      regex = re.compile(tag_regex)
    except re.error as e:
      raise TaskError("Invalid regular expression: {}: {}".format(tag_regex, e))
    return lambda target: any(map(regex.search, map(str, target.tags)))
  self._filters.extend(create_filters(self.get_options().tag_regex, filter_for_tag_regex))

  def filter_for_tag(tag):
    return lambda target: tag in map(str, target.tags)
  self._filters.extend(create_filters(self.get_options().tag, filter_for_tag))

def address_regex_filters(self) -> list[TargetFilter]:
    def outer_filter(address_regex: str) -> TargetFilter:
        regex = compile_regex(address_regex)
        return lambda tgt: bool(regex.search(tgt.address.spec))

    return create_filters(self.address_regex, outer_filter)

def tag_regex_filters(self) -> list[TargetFilter]:
    def outer_filter(tag_regex: str) -> TargetFilter:
        regex = compile_regex(tag_regex)
        return lambda tgt: any(bool(regex.search(tag)) for tag in tgt.get(Tags).value or ())

    return create_filters(self.tag_regex, outer_filter)

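# A hedged sketch (not the project's actual API) of how lists of TargetFilter
# predicates like the ones returned above could be applied: an item survives only
# if every filter accepts it. apply_filters and the plain address strings used as
# stand-in targets below are illustrative assumptions.
import re
from typing import Callable, Iterable, List, TypeVar

T = TypeVar("T")


def apply_filters(items: Iterable[T], filters: Iterable[Callable[[T], bool]]) -> List[T]:
    filter_list = list(filters)
    return [item for item in items if all(f(item) for f in filter_list)]


# Two regex filters ANDed over address-spec strings.
address_filters = [
    lambda spec: bool(re.search(r"^src/", spec)),
    lambda spec: bool(re.search(r":lib$", spec)),
]
assert apply_filters(["src/app:lib", "src/app:bin", "tests/app:lib"], address_filters) == ["src/app:lib"]
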
def addresses_from_address_families(address_mapper, specs):
  """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

  Raises an AddressLookupError if:
  - there were no matching AddressFamilies, or
  - the Spec matches no addresses for SingleAddresses.
  """
  # Capture a Snapshot covering all paths for these Specs, then group by directory.
  snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
  dirnames = set(dirname(f.stat.path) for f in snapshot.files)
  address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]

  # NB: `@memoized` does not work on local functions.
  def by_directory():
    if by_directory.cached is None:
      by_directory.cached = {af.namespace: af for af in address_families}
    return by_directory.cached
  by_directory.cached = None

  def raise_empty_address_family(spec):
    raise ResolveError('Path "{}" does not contain any BUILD files.'.format(spec.directory))

  def exclude_address(spec):
    if specs.exclude_patterns:
      return any(p.search(spec) is not None for p in specs.exclude_patterns_memo())
    return False

  def filter_for_tag(tag):
    return lambda t: tag in [str(t_tag) for t_tag in t.kwargs().get("tags", [])]
  include_target = wrap_filters(create_filters(specs.tags if specs.tags else '', filter_for_tag))

  addresses = []
  included = set()

  def include(address_families, predicate=None):
    matched = False
    for af in address_families:
      for (a, t) in af.addressables.items():
        if predicate is None or predicate(a):
          if include_target(t) and (not exclude_address(a.spec)):
            matched = True
            if a not in included:
              addresses.append(a)
              included.add(a)
    return matched

  for spec in specs.dependencies:
    if type(spec) is DescendantAddresses:
      matched = include(
        af
        for af in address_families
        if fast_relpath_optional(af.namespace, spec.directory) is not None
      )
      if not matched:
        raise AddressLookupError('Spec {} does not match any targets.'.format(spec))
    elif type(spec) is SiblingAddresses:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      include([address_family])
    elif type(spec) is SingleAddress:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      # spec.name here is generally the root node specified on commandline. equality here implies
      # a root node i.e. node specified on commandline.
      if not include([address_family], predicate=lambda a: a.target_name == spec.name):
        if len(addresses) == 0:
          _raise_did_you_mean(address_family, spec.name)
    elif type(spec) is AscendantAddresses:
      include(
        af
        for af in address_families
        if fast_relpath_optional(spec.directory, af.namespace) is not None
      )
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))

  yield BuildFileAddresses(addresses)

def addresses_from_address_families(address_mapper, specs):
  """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

  Raises an AddressLookupError if:
  - there were no matching AddressFamilies, or
  - the Spec matches no addresses for SingleAddresses.
  """
  # Capture a Snapshot covering all paths for these Specs, then group by directory.
  snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
  dirnames = set(dirname(f.stat.path) for f in snapshot.files)
  address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]

  # NB: `@memoized` does not work on local functions.
  def by_directory():
    if by_directory.cached is None:
      by_directory.cached = {af.namespace: af for af in address_families}
    return by_directory.cached
  by_directory.cached = None

  def raise_empty_address_family(spec):
    raise ResolveError('Path "{}" does not contain any BUILD files.'.format(spec.directory))

  def exclude_address(spec):
    if specs.exclude_patterns:
      return any(p.search(spec) is not None for p in specs.exclude_patterns_memo())
    return False

  def filter_for_tag(tag):
    return lambda t: tag in map(str, t.kwargs().get("tags", []))
  include_target = wrap_filters(create_filters(specs.tags if specs.tags else '', filter_for_tag))

  addresses = []
  included = set()

  def include(address_families, predicate=None):
    matched = False
    for af in address_families:
      for (a, t) in af.addressables.items():
        if predicate is None or predicate(a):
          if include_target(t) and (not exclude_address(a.spec)):
            matched = True
            if a not in included:
              addresses.append(a)
              included.add(a)
    return matched

  for spec in specs.dependencies:
    if type(spec) is DescendantAddresses:
      matched = include(
        af
        for af in address_families
        if fast_relpath_optional(af.namespace, spec.directory) is not None
      )
      if not matched:
        raise AddressLookupError('Spec {} does not match any targets.'.format(spec))
    elif type(spec) is SiblingAddresses:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      include([address_family])
    elif type(spec) is SingleAddress:
      address_family = by_directory().get(spec.directory)
      if not address_family:
        raise_empty_address_family(spec)
      # spec.name here is generally the root node specified on commandline. equality here implies
      # a root node i.e. node specified on commandline.
      if not include([address_family], predicate=lambda a: a.target_name == spec.name):
        if len(addresses) == 0:
          _raise_did_you_mean(address_family, spec.name)
    elif type(spec) is AscendantAddresses:
      include(
        af
        for af in address_families
        if fast_relpath_optional(spec.directory, af.namespace) is not None
      )
    else:
      raise ValueError('Unrecognized Spec type: {}'.format(spec))

  yield BuildFileAddresses(addresses)