def console_output(self, _):
  """Yields the address specs of targets that depend on the requested target roots.

  When ``self._dependees_type`` is set, only BUILD files under the source roots
  registered for those target types are scanned; otherwise every BUILD file under
  the build root is scanned.  When ``self._closed`` is set, the root targets' own
  specs are yielded as well.

  :param _: unused (console task targets placeholder).
  :raises TaskError: if a requested type name is unknown or no SourceRoot covers
    any of the requested types.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
      # machinery
      try:
        # Try to do a fully qualified import 1st for filtering on custom types.
        from_list, module, type_name = dependees_type.rsplit('.', 2)
        module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
        target_type = getattr(module, type_name)
      except (ImportError, ValueError):
        # Fall back on pants provided target types.
        registered_aliases = self.context.build_file_parser.registered_aliases()
        if dependees_type not in registered_aliases.targets:
          raise TaskError('Invalid type name: %s' % dependees_type)
        target_type = registered_aliases.targets[dependees_type]
      # Try to find the SourceRoot for the given input type
      try:
        roots = SourceRoot.roots(target_type)
        base_paths.update(roots)
      except KeyError:
        # No source root registered for this type; other requested types may still match.
        pass
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                  os.path.join(get_buildroot(), base_path)))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot())
  build_graph = self.context.build_graph
  build_file_parser = self.context.build_file_parser
  # Map each (concrete) dependency target to the set of (concrete) targets depending on it.
  dependees_by_target = defaultdict(set)
  for build_file in buildfiles:
    build_file_parser.parse_build_file(build_file)
    # First pass: make sure every address in this BUILD file is injected into the graph.
    for address in build_file_parser.addresses_by_build_file[build_file]:
      build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
    # Second pass: record the reverse-dependency edges.
    for address in build_file_parser.addresses_by_build_file[build_file]:
      target = build_graph.get_target(address)
      # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
      # user vs. targets created by pants at runtime.
      target = self.get_concrete_target(target)
      for dependency in target.dependencies:
        dependency = self.get_concrete_target(dependency)
        dependees_by_target[dependency].add(target)
  roots = set(self.context.target_roots)
  if self._closed:
    # Closed mode also reports the roots themselves.
    for root in roots:
      yield root.address.spec
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield dependant.address.spec
def console_output(self, _):
  """Yields the address specs of targets that depend on the requested target roots.

  When ``self._dependees_type`` is set, only BUILD files under the source roots
  registered for those target type aliases are scanned; otherwise every BUILD file
  under the build root is scanned (honoring ``self._spec_excludes`` in both cases).
  When ``self._closed`` is set, the root targets' own specs are yielded as well.

  :param _: unused (console task targets placeholder).
  :raises TaskError: if a type alias is unknown or no SourceRoot covers any
    requested type.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      target_aliases = self.context.build_file_parser.registered_aliases().targets
      if dependees_type not in target_aliases:
        raise TaskError('Invalid type name: %s' % dependees_type)
      target_type = target_aliases[dependees_type]
      # Try to find the SourceRoot for the given input type
      try:
        roots = SourceRoot.roots(target_type)
        base_paths.update(roots)
      except KeyError:
        # No source root registered for this type; other requested types may still match.
        pass
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                  os.path.join(get_buildroot(), base_path),
                                                  spec_excludes=self._spec_excludes))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot(), spec_excludes=self._spec_excludes)
  build_graph = self.context.build_graph
  build_file_parser = self.context.build_file_parser
  # Map each (concrete) dependency target to the set of (concrete) targets depending on it.
  dependees_by_target = defaultdict(set)
  for build_file in buildfiles:
    address_map = build_file_parser.parse_build_file(build_file)
    # First pass: inject every address (and its closure) into the build graph.
    for address in address_map.keys():
      build_graph.inject_address_closure(address)
    # Second pass: record the reverse-dependency edges.
    for address in address_map.keys():
      target = build_graph.get_target(address)
      # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
      # user vs. targets created by pants at runtime.
      target = self.get_concrete_target(target)
      for dependency in target.dependencies:
        dependency = self.get_concrete_target(dependency)
        dependees_by_target[dependency].add(target)
  roots = set(self.context.target_roots)
  if self._closed:
    # Closed mode also reports the roots themselves.
    for root in roots:
      yield root.address.spec
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield dependant.address.spec
def console_output(self, _):
  """Yields the address specs of targets that depend on the requested target roots.

  When ``self._dependees_type`` is set, only BUILD files under the source roots
  registered for those target type aliases are scanned; otherwise every BUILD file
  under the build root is scanned.  When ``self._closed`` is set, the root targets'
  own specs are yielded as well.

  :param _: unused (console task targets placeholder).
  :raises TaskError: if a type alias is unknown or no SourceRoot covers any
    requested type.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      target_aliases = self.context.build_file_parser.registered_aliases().targets
      if dependees_type not in target_aliases:
        raise TaskError('Invalid type name: %s' % dependees_type)
      target_type = target_aliases[dependees_type]
      # Try to find the SourceRoot for the given input type
      try:
        roots = SourceRoot.roots(target_type)
        base_paths.update(roots)
      except KeyError:
        # No source root registered for this type; other requested types may still match.
        pass
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                  os.path.join(get_buildroot(), base_path)))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot())
  build_graph = self.context.build_graph
  build_file_parser = self.context.build_file_parser
  # NOTE: removed an unused local binding of self.context.address_mapper that was
  # assigned but never read anywhere in this method.
  # Map each (concrete) dependency target to the set of (concrete) targets depending on it.
  dependees_by_target = defaultdict(set)
  for build_file in buildfiles:
    address_map = build_file_parser.parse_build_file(build_file)
    # First pass: inject every address (and its closure) into the build graph.
    for address in address_map.keys():
      build_graph.inject_address_closure(address)
    # Second pass: record the reverse-dependency edges.
    for address in address_map.keys():
      target = build_graph.get_target(address)
      # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
      # user vs. targets created by pants at runtime.
      target = self.get_concrete_target(target)
      for dependency in target.dependencies:
        dependency = self.get_concrete_target(dependency)
        dependees_by_target[dependency].add(target)
  roots = set(self.context.target_roots)
  if self._closed:
    # Closed mode also reports the roots themselves.
    for root in roots:
      yield root.address.spec
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield dependant.address.spec
def _parse_spec(self, spec):
  """Yields the BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.
  """
  def normalize_spec_path(path):
    # Anchor under the build root, resolve symlinks, and re-relativize;
    # the build root itself normalizes to the empty string.
    anchored = os.path.join(self._root_dir, path.lstrip('//'))
    rel = os.path.relpath(os.path.realpath(anchored), self._root_dir)
    return '' if rel == '.' else rel

  def addresses_in(build_file):
    # Parsing populates the parser's per-BUILD-file address book.
    self._build_file_parser.parse_build_file(build_file)
    return self._build_file_parser.addresses_by_build_file[build_file]

  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    scan_dir = normalize_spec_path(spec[:-len('::')])
    for build_file in BuildFile.scan_buildfiles(self._root_dir, scan_dir):
      for address in addresses_in(build_file):
        yield address
  elif spec.endswith(':'):
    # Sibling glob: every BUILD file in the given directory's family.
    family_dir = normalize_spec_path(spec[:-len(':')])
    for build_file in BuildFile(self._root_dir, family_dir).family():
      for address in addresses_in(build_file):
        yield address
  else:
    # A single, fully specified target address.
    parts = spec.rsplit(':', 1)
    parts[0] = normalize_spec_path(parts[0])
    spec_path, target_name = parse_spec(':'.join(parts))
    yield BuildFileAddress(BuildFile(self._root_dir, spec_path), target_name)
def test_directory_called_build_skipped(self):
  # A directory literally named BUILD (grandparent/BUILD) must not be picked up
  # as a BUILD file by the scan.
  found = BuildFile.scan_buildfiles(os.path.join(BuildFileTest.root_dir, 'grandparent'))
  expected_paths = [
    'grandparent/parent/BUILD',
    'grandparent/parent/BUILD.twitter',
    'grandparent/parent/child1/BUILD',
    'grandparent/parent/child1/BUILD.twitter',
    'grandparent/parent/child2/child3/BUILD',
    'grandparent/parent/child5/BUILD',
  ]
  self.assertEquals(OrderedSet(BuildFileTest.buildfile(p) for p in expected_paths), found)

def test_scan_buildfiles_exclude(self):
  # Excluded subtrees (child1, child2) must contribute no BUILD files to the scan.
  excludes = [os.path.join(BuildFileTest.root_dir, 'grandparent/parent/child1'),
              os.path.join(BuildFileTest.root_dir, 'grandparent/parent/child2')]
  found = BuildFile.scan_buildfiles(BuildFileTest.root_dir, '', spec_excludes=excludes)
  # NOTE(review): the expected paths mix bare and '/'-prefixed forms; preserved
  # as-is — confirm BuildFileTest.buildfile normalizes leading slashes.
  expected_paths = ['BUILD',
                    '/BUILD.twitter',
                    '/grandparent/parent/BUILD',
                    '/grandparent/parent/BUILD.twitter',
                    '/grandparent/parent/child5/BUILD']
  self.assertEquals([BuildFileTest.buildfile(p) for p in expected_paths], found)
def _addresses(self):
  """Yields addresses of the explicit target roots, or of every target when none are given."""
  roots = self.context.target_roots
  if not roots:
    # No explicit roots: fall back to every target in every BUILD file under the root dir.
    for build_file in BuildFile.scan_buildfiles(self._root_dir):
      for address in Target.get_all_addresses(build_file):
        yield address
  else:
    for root in roots:
      yield root.address
def console_output(self, _):
  """Yields string addresses of targets that depend on the requested target roots.

  When ``self._dependees_type`` is set, only BUILD files under the source roots
  registered for those types are scanned; otherwise every BUILD file under the
  build root is scanned.  When ``self._closed`` is set, the root targets' own
  addresses are yielded as well.

  :param _: unused (console task targets placeholder).
  :raises TaskError: if a type name is unknown or no SourceRoot covers any
    requested type.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      try:
        # Try to do a fully qualified import 1st for filtering on custom types.
        from_list, module, type_name = dependees_type.rsplit('.', 2)
        __import__('%s.%s' % (from_list, module), fromlist=[from_list])
      except (ImportError, ValueError):
        # Fall back on pants provided target types.
        if hasattr(pants.base.build_file_context, dependees_type):
          type_name = getattr(pants.base.build_file_context, dependees_type)
        else:
          raise TaskError('Invalid type name: %s' % dependees_type)
      # Find the SourceRoot for the given input type
      # NOTE(review): on the successful-import path the imported module is discarded and
      # type_name remains the bare class-name *string* from rsplit, while on the fallback
      # path it is the aliased type object — confirm SourceRoot.roots accepts both forms.
      base_paths.update(SourceRoot.roots(type_name))
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot())
  # Map each (concrete) dependency target to the set of (concrete) targets depending on it.
  dependees_by_target = defaultdict(set)
  for buildfile in buildfiles:
    for address in Target.get_all_addresses(buildfile):
      for target in Target.get(address).resolve():
        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        if hasattr(target, 'dependencies'):
          for dependencies in target.dependencies:
            for dependency in dependencies.resolve():
              dependency = self.get_concrete_target(dependency)
              dependees_by_target[dependency].add(target)
  roots = set(self.context.target_roots)
  if self._closed:
    # Closed mode also reports the roots themselves.
    for root in roots:
      yield str(root.address)
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield str(dependant.address)
def scan_addresses(root_dir, base_path=None):
  """Parses every BUILD file under ``base_path`` and returns the addresses of its targets.

  :param root_dir: the build root directory to scan within.
  :param base_path: sub-path to restrict the scan to; when ``None`` the whole of
    ``root_dir`` is scanned.
  :returns: an :class:`OrderedSet` of all addresses found.
  """
  found = OrderedSet()
  for build_file in BuildFile.scan_buildfiles(root_dir, base_path):
    for address in Target.get_all_addresses(build_file):
      found.add(address)
  return found
def _find_targets(self):
  """Yields the explicit target roots, or every target in the build root when none are given."""
  # Idiom fix: use the sequence's truthiness instead of `len(...) > 0`.
  if self.context.target_roots:
    for target in self.context.target_roots:
      yield target
  else:
    # No explicit roots: resolve every address found in every BUILD file.
    for buildfile in BuildFile.scan_buildfiles(get_buildroot()):
      for target_address in Target.get_all_addresses(buildfile):
        yield Target.get(target_address)
def scan_addresses(self, root=None):
  """Recursively gathers all addresses visible under `root` of the virtual address space.

  :param path root: defaults to the root directory of the pants project.
  """
  scan_root = root or get_buildroot()
  return {address
          for build_file in BuildFile.scan_buildfiles(scan_root)
          for address in self.addresses_in_spec_path(build_file.spec_path)}
def test_directory_called_build_skipped(self):
  # A directory literally named BUILD (grandparent/BUILD) must not be picked up
  # as a BUILD file by the scan.
  found = BuildFile.scan_buildfiles(os.path.join(BuildFileTest.root_dir, 'grandparent'))
  expected_paths = ['grandparent/parent/BUILD',
                    'grandparent/parent/BUILD.twitter',
                    'grandparent/parent/child1/BUILD',
                    'grandparent/parent/child1/BUILD.twitter',
                    'grandparent/parent/child2/child3/BUILD']
  self.assertEquals(OrderedSet(BuildFileTest.buildfile(p) for p in expected_paths), found)
def _parse_spec(self, spec):
  """Returns the set of BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.

  :raises self.BadSpecError: on malformed specs, addresses outside the build root,
    or lookup/parse failures.
  """
  def normalize_spec_path(path):
    # '//'-prefixed paths are root-relative even if they look absolute on POSIX.
    is_abs = not path.startswith('//') and os.path.isabs(path)
    if is_abs:
      path = os.path.realpath(path)
      # Absolute addresses must live inside the build root.
      if os.path.commonprefix([self._root_dir, path]) != self._root_dir:
        raise self.BadSpecError('Absolute address path {0} does not share build root {1}'
                                .format(path, self._root_dir))
    else:
      if path.startswith('//'):
        path = path[2:]
      path = os.path.join(self._root_dir, path)
    # Re-relativize; the build root itself becomes the empty string.
    normalized = os.path.relpath(path, self._root_dir)
    if normalized == '.':
      normalized = ''
    return normalized

  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    addresses = set()
    spec_path = spec[:-len('::')]
    spec_dir = normalize_spec_path(spec_path)
    if not os.path.isdir(os.path.join(self._root_dir, spec_dir)):
      raise self.BadSpecError('Can only recursive glob directories and {0} is not a valid dir'
                              .format(spec_dir))
    try:
      for build_file in BuildFile.scan_buildfiles(self._root_dir, spec_dir):
        addresses.update(self._address_mapper.addresses_in_spec_path(build_file.spec_path))
      return addresses
    except (BuildFile.BuildFileError, AddressLookupError) as e:
      raise self.BadSpecError(e)
  elif spec.endswith(':'):
    # Sibling glob: all addresses declared directly in the given directory.
    spec_path = spec[:-len(':')]
    spec_dir = normalize_spec_path(spec_path)
    try:
      return set(self._address_mapper.addresses_in_spec_path(spec_dir))
    except AddressLookupError as e:
      raise self.BadSpecError(e)
  else:
    # A single, fully specified target address.
    spec_parts = spec.rsplit(':', 1)
    spec_parts[0] = normalize_spec_path(spec_parts[0])
    spec_path, target_name = parse_spec(':'.join(spec_parts))
    try:
      build_file = BuildFile.from_cache(self._root_dir, spec_path)
      return set([BuildFileAddress(build_file, target_name)])
    except BuildFile.BuildFileError as e:
      raise self.BadSpecError(e)
def _parse_addresses(self, spec):
  """Yields the addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  single BUILD file in ``dir``; anything else is parsed as one target address.
  """
  # Idiom fix: the local was named `dir`, shadowing the builtin; renamed to rel_dir.
  if spec.endswith('::'):
    rel_dir = self._get_dir(spec[:-len('::')])
    for buildfile in BuildFile.scan_buildfiles(self._root_dir,
                                               os.path.join(self._root_dir, rel_dir)):
      for address in Target.get_all_addresses(buildfile):
        yield address
  elif spec.endswith(':'):
    rel_dir = self._get_dir(spec[:-len(':')])
    for address in Target.get_all_addresses(BuildFile(self._root_dir, rel_dir)):
      yield address
  else:
    yield Address.parse(self._root_dir, spec)
def _addresses(self):
  """Yields addresses of the explicit target roots, or of every target in the graph
  after scanning and injecting all BUILD files under the build root."""
  if self.context.target_roots:
    for root in self.context.target_roots:
      yield root.address
    return
  parser = self.context.build_file_parser
  graph = self.context.build_graph
  for build_file in BuildFile.scan_buildfiles(get_buildroot()):
    parser.parse_build_file(build_file)
    for address in parser.addresses_by_build_file[build_file]:
      parser.inject_spec_closure_into_build_graph(address.spec, graph)
  # NOTE(review): reads BuildGraph's private _target_by_address — confirm no public
  # accessor exists before relying on this elsewhere.
  for target in graph._target_by_address.values():
    yield target.address
def test_directory_called_build_skipped(self):
  # A directory literally named BUILD (grandparent/BUILD) must not be picked up
  # as a BUILD file by the scan.
  found = BuildFile.scan_buildfiles(os.path.join(self.root_dir, 'grandparent'))
  expected_paths = ['grandparent/parent/BUILD',
                    'grandparent/parent/BUILD.twitter',
                    'grandparent/parent/child1/BUILD',
                    'grandparent/parent/child1/BUILD.twitter',
                    'grandparent/parent/child2/child3/BUILD',
                    'grandparent/parent/child5/BUILD']
  self.assertEquals(OrderedSet(self.create_buildfile(p) for p in expected_paths), found)
def test_scan_buildfiles_exclude(self):
  # BUILD files under the excluded child1/child2 subtrees must be dropped from the scan.
  excludes = [os.path.join(self.root_dir, 'grandparent/parent/child1'),
              os.path.join(self.root_dir, 'grandparent/parent/child2')]
  found = BuildFile.scan_buildfiles(self.root_dir, '', spec_excludes=excludes)
  expected_paths = ['BUILD',
                    'BUILD.twitter',
                    'grandparent/parent/BUILD',
                    'grandparent/parent/BUILD.twitter',
                    'grandparent/parent/child5/BUILD']
  self.assertEquals([self.create_buildfile(p) for p in expected_paths], found)
def test_scan_buildfiles_exclude_relpath(self):
  # Same exclusion behavior, but with build-root-relative exclude paths.
  excludes = ['grandparent/parent/child1',
              'grandparent/parent/child2']
  found = BuildFile.scan_buildfiles(self.root_dir, '', spec_excludes=excludes)
  expected_paths = ['BUILD',
                    'BUILD.twitter',
                    'grandparent/parent/BUILD',
                    'grandparent/parent/BUILD.twitter',
                    'grandparent/parent/child5/BUILD']
  self.assertEquals([self.create_buildfile(p) for p in expected_paths], found)
def test_scan_buildfiles_exclude(self):
  # BUILD files under the excluded child1/child2 subtrees must be dropped from the scan.
  excludes = [os.path.join(BuildFileTest.root_dir, 'grandparent/parent/child1'),
              os.path.join(BuildFileTest.root_dir, 'grandparent/parent/child2')]
  found = BuildFile.scan_buildfiles(BuildFileTest.root_dir, '', spec_excludes=excludes)
  # NOTE(review): the expected paths mix bare and '/'-prefixed forms; preserved
  # as-is — confirm BuildFileTest.buildfile normalizes leading slashes.
  expected_paths = ['BUILD',
                    '/BUILD.twitter',
                    '/grandparent/parent/BUILD',
                    '/grandparent/parent/BUILD.twitter',
                    '/grandparent/parent/child5/BUILD']
  self.assertEquals([BuildFileTest.buildfile(p) for p in expected_paths], found)
def __init__(self, context):
  """Indexes every exported artifact (org, name) to its owning target.

  Scans all BUILD files under the build root so later runs can look up which
  target publishes a given jar artifact.
  """
  ConsoleTask.__init__(self, context)
  self._print_uptodate = context.options.check_deps_print_uptodate
  self.repos = context.config.getdict('jar-publish', 'repos')
  self._artifacts_to_targets = {}
  all_addresses = (address for buildfile in BuildFile.scan_buildfiles(get_buildroot())
                   for address in Target.get_all_addresses(buildfile))
  for address in all_addresses:
    target = Target.get(address)
    if target.is_exported:
      provided_jar, _, _ = target.get_artifact_info()
      artifact = (provided_jar.org, provided_jar.name)
      # Idiom fix: `artifact not in` instead of `not artifact in`.
      # First writer wins: keep the first target that exports a given artifact.
      if artifact not in self._artifacts_to_targets:
        self._artifacts_to_targets[artifact] = target
def scan_addresses(self, root=None):
  """Recursively gathers all addresses visible under `root` of the virtual address space.

  :param path root: defaults to the root directory of the pants project.
  :raises AddressLookupError: if there is a problem parsing a BUILD file
  """
  scan_root = root or get_buildroot()
  try:
    return {address
            for build_file in BuildFile.scan_buildfiles(scan_root)
            for address in self.addresses_in_spec_path(build_file.spec_path)}
  except BuildFile.BuildFileError as e:
    # Handle exception from BuildFile out of paranoia. Currently, there is no way to trigger it.
    raise self.BuildFileScanError("{message}\n while scanning BUILD files in '{root}'."
                                  .format(message=e, root=scan_root))
def scan(self, root=None):
  """Scans and parses all BUILD files found under ``root``.

  Only BUILD files found under ``root`` are parsed as roots in the graph, but any
  dependencies of targets parsed in the root tree's BUILD files will be followed and
  this may lead to BUILD files outside of ``root`` being parsed and included in the
  returned build graph.

  :param string root: The path to scan; by default, the build root.
  :returns: A new build graph encapsulating the targets found.
  """
  graph = BuildGraph()
  scan_root = root or get_buildroot()
  for build_file in BuildFile.scan_buildfiles(scan_root):
    self.parse_build_file(build_file)
    for address in self.addresses_by_build_file[build_file]:
      self.inject_address_closure_into_build_graph(address, graph)
  return graph
def scan_addresses(self, root=None, spec_excludes=None):
  """Recursively gathers all addresses visible under `root` of the virtual address space.

  :param path root: defaults to the root directory of the pants project.
  :param spec_excludes: paths to exclude from the scan.
  :raises AddressLookupError: if there is a problem parsing a BUILD file
  """
  scan_root = root or get_buildroot()
  try:
    return {address
            for build_file in BuildFile.scan_buildfiles(scan_root, spec_excludes=spec_excludes)
            for address in self.addresses_in_spec_path(build_file.spec_path)}
  except BuildFile.BuildFileError as e:
    # Handle exception from BuildFile out of paranoia. Currently, there is no way to trigger it.
    raise self.BuildFileScanError("{message}\n while scanning BUILD files in '{root}'."
                                  .format(message=e, root=scan_root))
def _parse_spec(self, spec):
  """Returns the set of BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.

  :raises self.BadSpecError: on malformed specs, paths outside the build root,
    missing BUILD files, or I/O/lookup failures.
  """
  def normalize_spec_path(path):
    # '//'-prefixed paths are root-relative even if they look absolute on POSIX.
    is_abs = not path.startswith('//') and os.path.isabs(path)
    if is_abs:
      path = os.path.realpath(path)
      # Absolute paths must live inside the build root.
      if os.path.commonprefix([self._root_dir, path]) != self._root_dir:
        raise self.BadSpecError('Absolute spec path {0} does not share build root {1}'
                                .format(path, self._root_dir))
    else:
      if path.startswith('//'):
        path = path[2:]
      path = os.path.join(self._root_dir, path)
    # Re-relativize; the build root itself becomes the empty string.
    normalized = os.path.relpath(path, self._root_dir)
    if normalized == '.':
      normalized = ''
    return normalized

  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    addresses = set()
    spec_path = spec[:-len('::')]
    spec_dir = normalize_spec_path(spec_path)
    if not os.path.isdir(os.path.join(self._root_dir, spec_dir)):
      raise self.BadSpecError('Can only recursive glob directories and {0} is not a valid dir'
                              .format(spec_dir))
    try:
      for build_file in BuildFile.scan_buildfiles(self._root_dir, spec_dir):
        addresses.update(self._address_mapper.addresses_in_spec_path(build_file.spec_path))
      return addresses
    except (IOError, BuildFile.MissingBuildFileError, AddressLookupError) as e:
      raise self.BadSpecError(e)
  elif spec.endswith(':'):
    # Sibling glob: all addresses declared directly in the given directory.
    spec_path = spec[:-len(':')]
    spec_dir = normalize_spec_path(spec_path)
    try:
      return set(self._address_mapper.addresses_in_spec_path(spec_dir))
    except (IOError, BuildFile.MissingBuildFileError, AddressLookupError) as e:
      raise self.BadSpecError(e)
  else:
    # A single, fully specified target address.
    spec_parts = spec.rsplit(':', 1)
    spec_parts[0] = normalize_spec_path(spec_parts[0])
    spec_path, target_name = parse_spec(':'.join(spec_parts))
    try:
      build_file = BuildFile.from_cache(self._root_dir, spec_path)
      return set([BuildFileAddress(build_file, target_name)])
    except (IOError, BuildFile.MissingBuildFileError) as e:
      raise self.BadSpecError(e)
def parse_addresses(self, spec):
  """Yields the BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.
  """
  def yield_parsed(build_file):
    # Parsing populates the parser's per-BUILD-file address book.
    self._build_file_parser.parse_build_file(build_file)
    for address in self._build_file_parser.addresses_by_build_file[build_file]:
      yield address

  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    rel_dir = self._get_dir(spec[:-len('::')])
    scan_dir = os.path.join(self._root_dir, rel_dir)
    for build_file in BuildFile.scan_buildfiles(self._root_dir, scan_dir):
      for address in yield_parsed(build_file):
        yield address
  elif spec.endswith(':'):
    # Sibling glob: every BUILD file in the given directory's family.
    rel_dir = self._get_dir(spec[:-len(':')])
    family_dir = os.path.join(self._root_dir, rel_dir)
    for build_file in BuildFile(self._root_dir, family_dir).family():
      for address in yield_parsed(build_file):
        yield address
  else:
    # A single, fully specified target address.
    spec_path, target_name = parse_spec(spec)
    yield BuildFileAddress(BuildFile(self._root_dir, spec_path), target_name)
def __init__(self, context, workdir):
  """Indexes every exported artifact (org, name) to its owning target.

  Scans all BUILD files under the build root, injecting them into the build
  graph, so later runs can look up which target publishes a given jar artifact.
  """
  super(CheckPublishedDeps, self).__init__(context, workdir)
  self._print_uptodate = context.options.check_deps_print_uptodate
  self.repos = context.config.getdict('jar-publish', 'repos')
  self._artifacts_to_targets = {}
  build_graph = self.context.build_graph
  build_file_parser = self.context.build_file_parser
  # TODO(pl): Hoist this pattern into the BuildFileParser/BuildGraph API
  for build_file in BuildFile.scan_buildfiles(get_buildroot()):
    build_file_parser.parse_build_file(build_file)
    for address in build_file_parser.addresses_by_build_file[build_file]:
      build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
  for target in build_graph._target_by_address.values():
    if target.is_exported:
      provided_jar, _, _ = target.get_artifact_info()
      artifact = (provided_jar.org, provided_jar.name)
      # Idiom fix: `artifact not in` instead of `not artifact in`.
      # First writer wins: keep the first target that exports a given artifact.
      if artifact not in self._artifacts_to_targets:
        self._artifacts_to_targets[artifact] = target
def __init__(self, context, workdir):
  """Indexes every exported artifact (org, name) to its owning target.

  Scans all BUILD files under the build root, injecting them into the build
  graph, so later runs can look up which target publishes a given jar artifact.
  """
  super(CheckPublishedDeps, self).__init__(context, workdir)
  self._print_uptodate = context.options.check_deps_print_uptodate
  self.repos = context.config.getdict('jar-publish', 'repos')
  self._artifacts_to_targets = {}
  build_graph = self.context.build_graph
  build_file_parser = self.context.build_file_parser
  # TODO(pl): Hoist this pattern into the BuildFileParser/BuildGraph API
  for build_file in BuildFile.scan_buildfiles(get_buildroot()):
    build_file_parser.parse_build_file(build_file)
    for address in build_file_parser.addresses_by_build_file[build_file]:
      build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
  for target in build_graph._target_by_address.values():
    if target.is_exported:
      provided_jar, _, _ = target.get_artifact_info()
      artifact = (provided_jar.org, provided_jar.name)
      # Idiom fix: `artifact not in` instead of `not artifact in`.
      # First writer wins: keep the first target that exports a given artifact.
      if artifact not in self._artifacts_to_targets:
        self._artifacts_to_targets[artifact] = target
def parse_addresses(self, spec):
  """Yields the BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.
  """
  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    scan_dir = os.path.join(self._root_dir, self._get_dir(spec[:-len('::')]))
    build_files = BuildFile.scan_buildfiles(self._root_dir, scan_dir)
  elif spec.endswith(':'):
    # Sibling glob: every BUILD file in the given directory's family.
    family_dir = os.path.join(self._root_dir, self._get_dir(spec[:-len(':')]))
    build_files = BuildFile(self._root_dir, family_dir).family()
  else:
    # A single, fully specified target address.
    spec_path, target_name = parse_spec(spec)
    yield BuildFileAddress(BuildFile(self._root_dir, spec_path), target_name)
    return
  for build_file in build_files:
    # Parsing populates the parser's per-BUILD-file address book.
    self._build_file_parser.parse_build_file(build_file)
    for address in self._build_file_parser.addresses_by_build_file[build_file]:
      yield address
def _parse_spec(self, spec, fail_fast=False):
  """Returns the set of BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.

  :param bool fail_fast: raise on the first broken BUILD file instead of
    accumulating every failure into one aggregate error.
  :raises self.BadSpecError: on malformed specs, addresses outside the build root,
    or lookup/parse failures.
  """
  def normalize_spec_path(path):
    # '//'-prefixed paths are root-relative even if they look absolute on POSIX.
    is_abs = not path.startswith('//') and os.path.isabs(path)
    if is_abs:
      path = os.path.realpath(path)
      # Absolute addresses must live inside the build root.
      if os.path.commonprefix([self._root_dir, path]) != self._root_dir:
        raise self.BadSpecError('Absolute address path {0} does not share build root {1}'
                                .format(path, self._root_dir))
    else:
      if path.startswith('//'):
        path = path[2:]
      path = os.path.join(self._root_dir, path)
    # Re-relativize; the build root itself becomes the empty string.
    normalized = os.path.relpath(path, self._root_dir)
    if normalized == '.':
      normalized = ''
    return normalized

  # Accumulates per-BUILD-file failure reports when fail_fast is False.
  errored_out = []

  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    addresses = set()
    spec_path = spec[:-len('::')]
    spec_dir = normalize_spec_path(spec_path)
    if not os.path.isdir(os.path.join(self._root_dir, spec_dir)):
      raise self.BadSpecError('Can only recursive glob directories and {0} is not a valid dir'
                              .format(spec_dir))
    try:
      build_files = BuildFile.scan_buildfiles(self._root_dir, spec_dir,
                                              spec_excludes=self._spec_excludes)
    except (BuildFile.BuildFileError, AddressLookupError) as e:
      raise self.BadSpecError(e)

    for build_file in build_files:
      try:
        # This attempts to filter out broken BUILD files before we parse them.
        if self._not_excluded_spec(build_file.spec_path):
          addresses.update(self._address_mapper.addresses_in_spec_path(build_file.spec_path))
      except (BuildFile.BuildFileError, AddressLookupError) as e:
        if fail_fast:
          raise self.BadSpecError(e)
        errored_out.append('--------------------')
        errored_out.append(traceback.format_exc())
        # NOTE(review): e.message is Python 2 only; would need str(e) under Python 3.
        errored_out.append('Exception message: {0}'.format(e.message))

    if errored_out:
      error_msg = '\n'.join(errored_out + ["Invalid BUILD files for [{0}]".format(spec)])
      raise self.BadSpecError(error_msg)
    return addresses

  elif spec.endswith(':'):
    # Sibling glob: all addresses declared directly in the given directory.
    spec_path = spec[:-len(':')]
    spec_dir = normalize_spec_path(spec_path)
    try:
      return set(self._address_mapper.addresses_in_spec_path(spec_dir))
    except AddressLookupError as e:
      raise self.BadSpecError(e)
  else:
    # A single, fully specified target address.
    spec_parts = spec.rsplit(':', 1)
    spec_parts[0] = normalize_spec_path(spec_parts[0])
    spec_path, target_name = parse_spec(':'.join(spec_parts))
    try:
      build_file = BuildFile.from_cache(self._root_dir, spec_path)
      return set([BuildFileAddress(build_file, target_name)])
    except BuildFile.BuildFileError as e:
      raise self.BadSpecError(e)
def _parse_spec(self, spec, fail_fast=False):
  """Returns the set of BUILD file addresses selected by a single spec string.

  ``dir::`` recursively globs all BUILD files under ``dir``; ``dir:`` covers the
  BUILD file family in ``dir``; anything else is a single target address.

  :param bool fail_fast: raise on the first broken BUILD file instead of
    accumulating every failure into one aggregate error.
  :raises self.BadSpecError: on malformed specs, addresses outside the build root,
    or lookup/parse failures.
  """
  def normalize_spec_path(path):
    # '//'-prefixed paths are root-relative even if they look absolute on POSIX.
    is_abs = not path.startswith('//') and os.path.isabs(path)
    if is_abs:
      path = os.path.realpath(path)
      # Absolute addresses must live inside the build root.
      if os.path.commonprefix([self._root_dir, path]) != self._root_dir:
        raise self.BadSpecError('Absolute address path {0} does not share build root {1}'
                                .format(path, self._root_dir))
    else:
      if path.startswith('//'):
        path = path[2:]
      path = os.path.join(self._root_dir, path)
    # Re-relativize; the build root itself becomes the empty string.
    normalized = os.path.relpath(path, self._root_dir)
    if normalized == '.':
      normalized = ''
    return normalized

  # Accumulates per-BUILD-file failure reports when fail_fast is False.
  errored_out = []

  if spec.endswith('::'):
    # Recursive glob: every BUILD file at or below the given directory.
    addresses = set()
    spec_path = spec[:-len('::')]
    spec_dir = normalize_spec_path(spec_path)
    if not os.path.isdir(os.path.join(self._root_dir, spec_dir)):
      raise self.BadSpecError('Can only recursive glob directories and {0} is not a valid dir'
                              .format(spec_dir))
    try:
      build_files = BuildFile.scan_buildfiles(self._root_dir, spec_dir,
                                              spec_excludes=self._spec_excludes)
    except (BuildFile.BuildFileError, AddressLookupError) as e:
      raise self.BadSpecError(e)

    for build_file in build_files:
      try:
        # This attempts to filter out broken BUILD files before we parse them.
        if self._not_excluded_spec(build_file.spec_path):
          addresses.update(self._address_mapper.addresses_in_spec_path(build_file.spec_path))
      except (BuildFile.BuildFileError, AddressLookupError) as e:
        if fail_fast:
          raise self.BadSpecError(e)
        errored_out.append('--------------------')
        errored_out.append(traceback.format_exc())
        # NOTE(review): e.message is Python 2 only; would need str(e) under Python 3.
        errored_out.append('Exception message: {0}'.format(e.message))

    if errored_out:
      error_msg = '\n'.join(errored_out + ["Invalid BUILD files for [{0}]".format(spec)])
      raise self.BadSpecError(error_msg)
    return addresses

  elif spec.endswith(':'):
    # Sibling glob: all addresses declared directly in the given directory.
    spec_path = spec[:-len(':')]
    spec_dir = normalize_spec_path(spec_path)
    try:
      return set(self._address_mapper.addresses_in_spec_path(spec_dir))
    except AddressLookupError as e:
      raise self.BadSpecError(e)
  else:
    # A single, fully specified target address.
    spec_parts = spec.rsplit(':', 1)
    spec_parts[0] = normalize_spec_path(spec_parts[0])
    spec_path, target_name = parse_spec(':'.join(spec_parts))
    try:
      build_file = BuildFile.from_cache(self._root_dir, spec_path)
      return set([BuildFileAddress(build_file, target_name)])
    except BuildFile.BuildFileError as e:
      raise self.BadSpecError(e)
def console_output(self, _):
  """Yields the build-file specs of targets that depend on the requested target roots.

  When ``self._dependees_type`` is set, only BUILD files under the source roots
  registered for those target types are scanned; otherwise every BUILD file under
  the build root is scanned.  When ``self._closed`` is set, the root targets' own
  specs are yielded as well.

  :param _: unused (console task targets placeholder).
  :raises TaskError: if a type name is unknown or no SourceRoot covers any
    requested type.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
      # machinery
      try:
        # Try to do a fully qualified import 1st for filtering on custom types.
        from_list, module, type_name = dependees_type.rsplit('.', 2)
        module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
        target_type = getattr(module, type_name)
      except (ImportError, ValueError):
        # Fall back on pants provided target types.
        if dependees_type not in pants.base.build_file_aliases.target_aliases:
          raise TaskError('Invalid type name: %s' % dependees_type)
        target_type = pants.base.build_file_aliases.target_aliases[dependees_type]
      # Find the SourceRoot for the given input type
      base_paths.update(SourceRoot.roots(target_type))
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot())
  build_graph = self.context.build_graph
  build_file_parser = self.context.build_file_parser
  # Map each (concrete) dependency target to the set of (concrete) targets depending on it.
  dependees_by_target = defaultdict(set)
  for build_file in buildfiles:
    build_file_parser.parse_build_file(build_file)
    # First pass: make sure every address in this BUILD file is injected into the graph.
    for address in build_file_parser.addresses_by_build_file[build_file]:
      build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
    # Second pass: record the reverse-dependency edges.
    for address in build_file_parser.addresses_by_build_file[build_file]:
      target = build_graph.get_target(address)
      # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
      # user vs. targets created by pants at runtime.
      target = self.get_concrete_target(target)
      for dependency in target.dependencies:
        dependency = self.get_concrete_target(dependency)
        dependees_by_target[dependency].add(target)
  roots = set(self.context.target_roots)
  if self._closed:
    # Closed mode also reports the roots themselves.
    for root in roots:
      yield root.address.build_file_spec
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield dependant.address.build_file_spec