def _collect_ancestor_build_files(cls, project_tree, dir_relpath, build_ignore_patterns):
    """Yield the BUILD file family at `dir_relpath` and at each ancestor dir up to the root."""
    current = dir_relpath
    for family_member in BuildFile.get_build_files_family(project_tree, current,
                                                          build_ignore_patterns):
        yield family_member
    # Walk upward one directory at a time until the buildroot is reached.
    while not cls._is_root_relpath(current):
        current = os.path.dirname(current)
        for family_member in BuildFile.get_build_files_family(project_tree, current,
                                                              build_ignore_patterns):
            yield family_member
def tearDown(self):
    """Reset global state mutated during a test run.

    :API: public
    """
    super(BaseTest, self).tearDown()
    # BuildFile memoizes parsed files; stale entries would leak between tests.
    BuildFile.clear_cache()
    Subsystem.reset()
def console_output(self, _):
    """Yield the specs of targets that depend on the target roots.

    With --closed the roots' own specs are yielded too.  When dependee types are
    configured, scanning is limited to BUILD files under those types' source roots.
    """
    buildfiles = OrderedSet()
    if self._dependees_type:
        base_paths = OrderedSet()
        for dependees_type in self._dependees_type:
            # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
            # machinery
            try:
                # Try to do a fully qualified import 1st for filtering on custom types.
                from_list, module, type_name = dependees_type.rsplit('.', 2)
                module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
                target_type = getattr(module, type_name)
            except (ImportError, ValueError):
                # Fall back on pants provided target types.
                registered_aliases = self.context.build_file_parser.registered_aliases()
                if dependees_type not in registered_aliases.targets:
                    raise TaskError('Invalid type name: %s' % dependees_type)
                target_type = registered_aliases.targets[dependees_type]
            # Try to find the SourceRoot for the given input type; types without a
            # registered source root are simply skipped.
            try:
                roots = SourceRoot.roots(target_type)
                base_paths.update(roots)
            except KeyError:
                pass
        if not base_paths:
            raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                            '\nPlease define a source root in BUILD file as:' +
                            '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
        for base_path in base_paths:
            buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                        os.path.join(get_buildroot(), base_path)))
    else:
        buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser

    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
        build_file_parser.parse_build_file(build_file)
        # Inject every closure before resolving targets so the graph is complete.
        for address in build_file_parser.addresses_by_build_file[build_file]:
            build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
        for address in build_file_parser.addresses_by_build_file[build_file]:
            target = build_graph.get_target(address)
            # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
            # user vs. targets created by pants at runtime.
            target = self.get_concrete_target(target)
            for dependency in target.dependencies:
                dependency = self.get_concrete_target(dependency)
                dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        for root in roots:
            yield root.address.spec
    for dependant in self.get_dependants(dependees_by_target, roots):
        yield dependant.address.spec
def _candidate_owners(self, path):
    """Yield BUILD files that could plausibly declare a target owning `path`:
    the file's own directory's BUILD file, its siblings, then its ancestors."""
    owning_dir = os.path.dirname(path)
    candidate = BuildFile(get_buildroot(), relpath=owning_dir, must_exist=False)
    if candidate.exists():
        yield candidate
    for sibling_file in candidate.siblings():
        yield sibling_file
    for ancestor_file in candidate.ancestors():
        yield ancestor_file
def console_output(self, _):
    """Yield the specs of targets that depend on the target roots.

    With --closed the roots' own specs are yielded too.  When dependee types are
    configured, scanning is limited to BUILD files under those types' source roots.

    :raises TaskError: for unknown type names or types with no source root.
    """
    buildfiles = OrderedSet()
    if self._dependees_type:
        base_paths = OrderedSet()
        for dependees_type in self._dependees_type:
            target_aliases = self.context.build_file_parser.registered_aliases().targets
            if dependees_type not in target_aliases:
                raise TaskError('Invalid type name: %s' % dependees_type)
            target_type = target_aliases[dependees_type]
            # Try to find the SourceRoot for the given input type; types without a
            # registered source root are simply skipped.
            try:
                roots = SourceRoot.roots(target_type)
                base_paths.update(roots)
            except KeyError:
                pass
        if not base_paths:
            raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                            '\nPlease define a source root in BUILD file as:' +
                            '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
        for base_path in base_paths:
            buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                        os.path.join(get_buildroot(), base_path)))
    else:
        buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser
    # NOTE: removed the unused `address_mapper` local that was fetched but never read.

    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
        address_map = build_file_parser.parse_build_file(build_file)
        # Inject every closure before resolving targets so the graph is complete.
        for address in address_map:
            build_graph.inject_address_closure(address)
        for address in address_map:
            target = build_graph.get_target(address)
            # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
            # user vs. targets created by pants at runtime.
            target = self.get_concrete_target(target)
            for dependency in target.dependencies:
                dependency = self.get_concrete_target(dependency)
                dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        for root in roots:
            yield root.address.spec
    for dependant in self.get_dependants(dependees_by_target, roots):
        yield dependant.address.spec
def _expand_goals_and_specs(self):
    """Resolve command-line goals and target specs into `self.goals`/`self.targets`.

    Warns when an argument names both a goal and an existing BUILD file directory,
    and exits early when help was requested.
    """
    goals = self.options.goals
    specs = self.options.target_specs
    fail_fast = self.options.for_global_scope().fail_fast

    for goal in goals:
        # An argument matching a BUILD file path is ambiguous; we assume goal.
        if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
            logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                           "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

    if self.options.print_help_if_requested():
        sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
        spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                        spec_excludes=self.spec_excludes,
                                        exclude_target_regexps=self.global_options.exclude_target_regexp)
        with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
            for spec in specs:
                for address in spec_parser.parse_addresses(spec, fail_fast):
                    # Materialize the transitive closure into the build graph.
                    self.build_graph.inject_address_closure(address)
                    self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]
def to_url(m):
    """Map a regex match of a path-like token to a reporting-server /browse/ url.

    Returns the original text for http(s) urls, '/browse/<relpath>' for paths that
    exist inside the buildroot, and None when the token is not browsable.
    """
    if m.group(1):
        return m.group(0)  # It's an http(s) url.
    path = m.group(0)

    if path.startswith('/'):
        # Absolute path: make it buildroot-relative for the /browse/ endpoint.
        path = os.path.relpath(path, buildroot)
    elif path.startswith('..'):
        # The path is not located inside the buildroot, so it's definitely not a BUILD file.
        return None
    else:
        # The path is located in the buildroot: see if it's a reference to a target in a BUILD file.
        parts = path.split(':')
        if len(parts) == 2:
            putative_dir = parts[0]
        else:
            putative_dir = path
        if os.path.isdir(os.path.join(buildroot, putative_dir)):
            # A directory reference: link to the first BUILD file of its family, if any.
            build_files = list(BuildFile.get_build_files_family(
                FileSystemProjectTree(buildroot), putative_dir))
            if build_files:
                path = build_files[0].relpath
            else:
                return None

    if os.path.exists(os.path.join(buildroot, path)):
        # The reporting server serves file content at /browse/<path_from_buildroot>.
        return '/browse/{}'.format(path)
    else:
        return None
def address_map_from_spec_path(self, spec_path):
    """Return one merged {address: addressable} map for the whole BUILD file family
    rooted at `spec_path`."""
    family_root = BuildFile.from_cache(self._root_dir, spec_path)
    merged = {}
    for sibling_map in self.parse_build_file_family(family_root).values():
        merged.update(sibling_map)
    return merged
def setup(self): options_bootstrapper = OptionsBootstrapper() # Force config into the cache so we (and plugin/backend loading code) can use it. # TODO: Plumb options in explicitly. options_bootstrapper.get_bootstrap_options() self.config = Config.from_cache() # Add any extra paths to python path (eg for loading extra source backends) extra_paths = self.config.getlist('backends', 'python-path', []) if extra_paths: sys.path.extend(extra_paths) # Load plugins and backends. backend_packages = self.config.getlist('backends', 'packages', []) plugins = self.config.getlist('backends', 'plugins', []) build_configuration = load_plugins_and_backends(plugins, backend_packages) # Now that plugins and backends are loaded, we can gather the known scopes. self.targets = [] known_scopes = [''] for goal in Goal.all(): # Note that enclosing scopes will appear before scopes they enclose. known_scopes.extend(filter(None, goal.known_scopes())) # Now that we have the known scopes we can get the full options. self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes) self.register_options() self.run_tracker = RunTracker.from_config(self.config) report = initial_reporting(self.config, self.run_tracker) self.run_tracker.start(report) url = self.run_tracker.run_info.get_info('report_url') if url: self.run_tracker.log(Report.INFO, 'See a report at: %s' % url) else: self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)') self.build_file_parser = BuildFileParser(build_configuration=build_configuration, root_dir=self.root_dir, run_tracker=self.run_tracker) self.address_mapper = BuildFileAddressMapper(self.build_file_parser) self.build_graph = BuildGraph(run_tracker=self.run_tracker, address_mapper=self.address_mapper) with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]): # construct base parameters to be filled in for BuildGraph for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]): build_file = 
BuildFile.from_cache(root_dir=self.root_dir, relpath=path) # TODO(pl): This is an unfortunate interface leak, but I don't think # in the long run that we should be relying on "bootstrap" BUILD files # that do nothing except modify global state. That type of behavior # (e.g. source roots, goal registration) should instead happen in # project plugins, or specialized configuration files. self.build_file_parser.parse_build_file_family(build_file) # Now that we've parsed the bootstrap BUILD files, and know about the SCM system. self.run_tracker.run_info.add_scm_info() self._expand_goals_and_specs()
def scan_addresses(self, root=None):
    """Recursively gathers all addresses visible under `root` of the virtual address space.

    :param string root: The absolute path of the root to scan; defaults to the root directory of
                        the pants project.
    :rtype: set of :class:`pants.build_graph.address.Address`
    :raises AddressLookupError: if there is a problem parsing a BUILD file
    """
    root_dir = get_buildroot()
    base_path = None

    if root:
        try:
            # Scanning is buildroot-relative; `root` must live underneath it.
            base_path = fast_relpath(root, root_dir)
        except ValueError as e:
            raise self.InvalidRootError(e)

    addresses = set()
    try:
        for build_file in BuildFile.scan_build_files(self._project_tree,
                                                     base_relpath=base_path,
                                                     build_ignore_patterns=self._build_ignore_patterns):
            for address in self.addresses_in_spec_path(build_file.spec_path):
                addresses.add(address)
    except BuildFile.BuildFileError as e:
        # Handle exception from BuildFile out of paranoia. Currently, there is no way to trigger it.
        raise self.BuildFileScanError("{message}\n while scanning BUILD files in '{root}'."
                                      .format(message=e, root=root))
    return addresses
def _parse_spec(self, spec):
    """Yield the BUILD file addresses matched by `spec`.

    Supports three forms: 'path::' (recursive over all descendants), 'path:'
    (the directory's whole BUILD file family) and 'path:name' (a single target).
    """
    def normalize_spec_path(path):
        # NOTE(review): lstrip('//') strips *any* run of leading '/' chars, not
        # just a '//' prefix — OK while only '//'-rooted specs are expected.
        path = os.path.join(self._root_dir, path.lstrip('//'))
        # Resolve symlinks so relpaths are computed against the real layout.
        normalized = os.path.relpath(os.path.realpath(path), self._root_dir)
        if normalized == '.':
            normalized = ''
        return normalized

    if spec.endswith('::'):
        # Recursive glob: every target under the directory.
        spec_path = spec[:-len('::')]
        spec_dir = normalize_spec_path(spec_path)
        for build_file in BuildFile.scan_buildfiles(self._root_dir, spec_dir):
            self._build_file_parser.parse_build_file(build_file)
            for address in self._build_file_parser.addresses_by_build_file[build_file]:
                yield address
    elif spec.endswith(':'):
        # Sibling glob: every target in the directory's BUILD file family.
        spec_path = spec[:-len(':')]
        spec_dir = normalize_spec_path(spec_path)
        for build_file in BuildFile(self._root_dir, spec_dir).family():
            self._build_file_parser.parse_build_file(build_file)
            for address in self._build_file_parser.addresses_by_build_file[build_file]:
                yield address
    else:
        # Single target address.
        spec_parts = spec.rsplit(':', 1)
        spec_parts[0] = normalize_spec_path(spec_parts[0])
        spec_path, target_name = parse_spec(':'.join(spec_parts))
        build_file = BuildFile(self._root_dir, spec_path)
        yield BuildFileAddress(build_file, target_name)
def test_sibling_build_files_duplicates(self):
    # This workspace is malformed, you can't shadow a name in a sibling BUILD file
    self.add_to_build_file('BUILD', dedent(
        """
        fake(name="base",
          dependencies=[
            ':foo',
          ])
        """))
    self.add_to_build_file('BUILD.foo', dedent(
        """
        fake(name="foo",
          dependencies=[
            ':bat',
          ])
        """))
    self.add_to_build_file('./BUILD.bar', dedent(
        """
        fake(name="base")
        """))
    # 'base' is declared by both BUILD and BUILD.bar, so parsing the family must fail.
    with self.assertRaises(BuildFileParser.SiblingConflictException):
        self.build_file_parser.address_map_from_build_files(
            BuildFile.get_build_files_family(FileSystemProjectTree(self.build_root), '.'))
def test_sibling_build_files(self):
    """Targets declared across sibling BUILD files in one directory merge into one family map."""
    self.add_to_build_file('BUILD', dedent(
        """
        fake(name="base",
          dependencies=[
            ':foo',
          ])
        """))
    self.add_to_build_file('BUILD.foo', dedent(
        """
        fake(name="foo",
          dependencies=[
            ':bat',
          ])
        """))
    self.add_to_build_file('./BUILD.bar', dedent(
        """
        fake(name="bat")
        """))

    bar_build_file = self.create_buildfile('BUILD.bar')
    base_build_file = self.create_buildfile('BUILD')
    foo_build_file = self.create_buildfile('BUILD.foo')

    address_map = self.build_file_parser.address_map_from_build_files(
        BuildFile.get_build_files_family(FileSystemProjectTree(self.build_root), "."))
    addresses = address_map.keys()
    # Every address remembers which sibling file declared it.
    self.assertEqual({bar_build_file.relpath, base_build_file.relpath, foo_build_file.relpath},
                     set([address.rel_path for address in addresses]))
    self.assertEqual({'//:base', '//:foo', '//:bat'},
                     set([address.spec for address in addresses]))
def _parse_spec(self, spec):
    """Return the set of addresses matched by `spec` ('path::', 'path:' or 'path:name').

    :raises BadSpecError: for absolute paths outside the build root, recursive
      globs over non-directories, and address lookup/IO failures.
    """
    def normalize_spec_path(path):
        # '//'-prefixed specs are buildroot-relative, not filesystem-absolute.
        is_abs = not path.startswith('//') and os.path.isabs(path)
        if is_abs:
            path = os.path.realpath(path)
            if os.path.commonprefix([self._root_dir, path]) != self._root_dir:
                raise self.BadSpecError('Absolute spec path {0} does not share build root {1}'
                                        .format(path, self._root_dir))
        else:
            if path.startswith('//'):
                path = path[2:]
            path = os.path.join(self._root_dir, path)

        normalized = os.path.relpath(path, self._root_dir)
        if normalized == '.':
            normalized = ''
        return normalized

    if spec.endswith('::'):
        # Recursive glob: every target under the directory.
        addresses = set()
        spec_path = spec[:-len('::')]
        spec_dir = normalize_spec_path(spec_path)
        if not os.path.isdir(os.path.join(self._root_dir, spec_dir)):
            raise self.BadSpecError('Can only recursive glob directories and {0} is not a valid dir'
                                    .format(spec_dir))
        try:
            for build_file in BuildFile.scan_buildfiles(self._root_dir, spec_dir):
                addresses.update(self._address_mapper.addresses_in_spec_path(build_file.spec_path))
            return addresses
        except (IOError, BuildFile.MissingBuildFileError, AddressLookupError) as e:
            raise self.BadSpecError(e)
    elif spec.endswith(':'):
        # Sibling glob: every target in one BUILD file family.
        spec_path = spec[:-len(':')]
        spec_dir = normalize_spec_path(spec_path)
        try:
            return set(self._address_mapper.addresses_in_spec_path(spec_dir))
        except (IOError, BuildFile.MissingBuildFileError, AddressLookupError) as e:
            raise self.BadSpecError(e)
    else:
        # Single target address.
        spec_parts = spec.rsplit(':', 1)
        spec_parts[0] = normalize_spec_path(spec_parts[0])
        spec_path, target_name = parse_spec(':'.join(spec_parts))
        try:
            build_file = BuildFile.from_cache(self._root_dir, spec_path)
            return set([BuildFileAddress(build_file, target_name)])
        except (IOError, BuildFile.MissingBuildFileError) as e:
            raise self.BadSpecError(e)
def is_declaring_file(address, file_path):
    """True when `file_path` is a BUILD file in the directory that declares `address`.

    NB: this will cause any BUILD file, whether it contains the address declaration or not to be
    considered the one that declared it. That's ok though, because the spec path should be enough
    information for debugging most of the time.

    We could call into the engine to ask for the file that declared the address.
    """
    if not BuildFile._is_buildfile_name(os.path.basename(file_path)):
        return False
    return os.path.dirname(file_path) == address.spec_path
def _addresses(self):
    """Yield the addresses of the target roots, or of every target in the workspace
    when no roots were specified."""
    root_targets = self.context.target_roots
    if root_targets:
        for root_target in root_targets:
            yield root_target.address
        return
    for scanned_file in BuildFile.scan_buildfiles(self._root_dir):
        for address in Target.get_all_addresses(scanned_file):
            yield address
def console_output(self, _):
    """Yield (as strings) the addresses of targets that depend on the target roots.

    With --closed the roots' own addresses are yielded too.  When dependee types
    are configured, scanning is limited to BUILD files under those types' source roots.

    :raises TaskError: for unknown type names or types with no source root.
    """
    buildfiles = OrderedSet()
    if self._dependees_type:
        base_paths = OrderedSet()
        for dependees_type in self._dependees_type:
            try:
                # Try to do a fully qualified import 1st for filtering on custom types.
                from_list, module, type_name = dependees_type.rsplit('.', 2)
                imported = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
                # Bug fix: resolve the actual type object.  Previously this branch left
                # `type_name` as a bare string, which then leaked into SourceRoot.roots()
                # while the fallback branch passed a type object.
                type_name = getattr(imported, type_name)
            except (ImportError, ValueError):
                # Fall back on pants provided target types.
                if hasattr(pants.base.build_file_context, dependees_type):
                    type_name = getattr(pants.base.build_file_context, dependees_type)
                else:
                    raise TaskError('Invalid type name: %s' % dependees_type)
            # Find the SourceRoot for the given input type
            base_paths.update(SourceRoot.roots(type_name))
        if not base_paths:
            raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                            '\nPlease define a source root in BUILD file as:' +
                            '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
        for base_path in base_paths:
            buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
    else:
        buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    dependees_by_target = defaultdict(set)
    for buildfile in buildfiles:
        for address in Target.get_all_addresses(buildfile):
            for target in Target.get(address).resolve():
                # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
                # user vs. targets created by pants at runtime.
                target = self.get_concrete_target(target)
                if hasattr(target, 'dependencies'):
                    for dependencies in target.dependencies:
                        for dependency in dependencies.resolve():
                            dependency = self.get_concrete_target(dependency)
                            dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        for root in roots:
            yield str(root.address)
    for dependant in self.get_dependants(dependees_by_target, roots):
        yield str(dependant.address)
def scan_addresses(root_dir, base_path=None):
    """Parses all targets available in BUILD files under base_path and returns their addresses.

    If no base_path is specified, root_dir is assumed to be the base_path.
    """
    collected = OrderedSet()
    for found_buildfile in BuildFile.scan_buildfiles(root_dir, base_path):
        collected.update(Target.get_all_addresses(found_buildfile))
    return collected
def setup_parser(self, parser, args):
    """Resolve goals and target specs from `args`, then apply pantsrc defaults in place."""
    if not args:
        args.append('help')

    logger = logging.getLogger(__name__)

    goals = self.new_options.goals
    specs = self.new_options.target_specs
    fail_fast = self.new_options.for_global_scope().fail_fast

    for goal in goals:
        # An argument matching an existing BUILD file path is ambiguous; we assume goal.
        if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
            logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                           "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

    if self.new_options.is_help:
        self.new_options.print_help(goals=goals)
        sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
        spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                        spec_excludes=self.get_spec_excludes())
        with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
            for spec in specs:
                for address in spec_parser.parse_addresses(spec, fail_fast):
                    # Materialize the transitive closure into the build graph.
                    self.build_graph.inject_address_closure(address)
                    self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]

    rcfiles = self.config.getdefault('rcfiles', type=list,
                                     default=['/etc/pantsrc', '~/.pants.rc'])
    if rcfiles:
        rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

        # Break down the goals specified on the command line to the full set that will be run so we
        # can apply default flags to inner goal nodes. Also break down goals by Task subclass and
        # register the task class hierarchy fully qualified names so we can apply defaults to
        # baseclasses.
        sections = OrderedSet()
        for goal in Engine.execution_order(self.goals):
            for task_name in goal.ordered_task_names():
                sections.add(task_name)
                task_type = goal.task_type_by_name(task_name)
                for clazz in task_type.mro():
                    if clazz == Task:
                        break
                    sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

        augmented_args = rcfile.apply_defaults(sections, args)
        if augmented_args != args:
            # TODO(John Sirois): Cleanup this currently important mutation of the passed in args
            # once the 2-layer of command -> goal is squashed into one.
            args[:] = augmented_args
            sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" % ' '.join(augmented_args))
def _find_targets(self):
    """Yield the target roots when any were specified, else every target in the workspace."""
    # Idiom fix: truthiness test instead of `len(...) > 0`.
    if self.context.target_roots:
        for target in self.context.target_roots:
            yield target
    else:
        for buildfile in BuildFile.scan_buildfiles(get_buildroot()):
            for target_address in Target.get_all_addresses(buildfile):
                yield Target.get(target_address)
def configure_target(target):
    """Fold `target` (once) into the IDE project model: flags, source sets,
    resources — then gather sibling candidates that share its source dirs."""
    if target not in analyzed_targets:
        analyzed_targets.add(target)
        self.has_scala = not self.skip_scala and (self.has_scala or is_scala(target))

        # Hack for java_sources and Eclipse/IntelliJ: add java_sources to project
        if isinstance(target, ScalaLibrary):
            for java_source in target.java_sources:
                configure_target(java_source)

        # Resources are already in the target set
        if target.has_resources:
            resources_by_basedir = defaultdict(set)
            for resources in target.resources:
                analyzed_targets.add(resources)
                resources_by_basedir[resources.target_base].update(relative_sources(resources))
            for basedir, resources in resources_by_basedir.items():
                self.resource_extensions.update(Project.extract_resource_extensions(resources))
                configure_source_sets(basedir, resources, is_test=target.is_test,
                                      resources_only=True)

        if target.has_sources():
            test = target.is_test
            self.has_tests = self.has_tests or test
            base = target.target_base
            configure_source_sets(base, relative_sources(target), is_test=test,
                                  resources_only=isinstance(target, Resources))

        # TODO(Garrett Malmquist): This is dead code, and should be redone/reintegrated.
        # Other BUILD files may specify sources in the same directory as this target. Those BUILD
        # files might be in parent directories (globs('a/b/*.java')) or even children directories if
        # this target globs children as well. Gather all these candidate BUILD files to test for
        # sources they own that live in the directories this targets sources live in.
        target_dirset = find_source_basedirs(target)
        if not isinstance(target.address, BuildFileAddress):
            return []  # Siblings only make sense for BUILD files.

        candidates = OrderedSet()
        build_file = target.address.build_file
        dir_relpath = os.path.dirname(build_file.relpath)
        for descendant in BuildFile.scan_build_files(build_file.project_tree, dir_relpath,
                                                     spec_excludes=self.spec_excludes,
                                                     build_ignore_patterns=self.build_ignore_patterns):
            candidates.update(self.target_util.get_all_addresses(descendant))
        if not self._is_root_relpath(dir_relpath):
            # Walk BUILD files upward from the parent dir to the buildroot.
            ancestors = self._collect_ancestor_build_files(build_file.project_tree,
                                                           os.path.dirname(dir_relpath),
                                                           self.build_ignore_patterns)
            for ancestor in ancestors:
                candidates.update(self.target_util.get_all_addresses(ancestor))

        def is_sibling(target):
            # A sibling owns sources in one of this target's source base dirs.
            return source_target(target) and target_dirset.intersection(find_source_basedirs(target))

        return filter(is_sibling, [self.target_util.get(a) for a in candidates
                                   if a != target.address])
def scan_addresses(self, root=None):
    """Recursively gathers all addresses visible under `root` of the virtual address space.

    :param path root: defaults to the root directory of the pants project.
    """
    scan_root = root or get_buildroot()
    found = set()
    for build_file in BuildFile.scan_buildfiles(scan_root):
        found.update(self.addresses_in_spec_path(build_file.spec_path))
    return found
def is_spec(spec):
    """Return True when `spec` should be treated as a target spec rather than a goal name."""
    if os.sep in spec or ':' in spec:
        return True  # Definitely not a goal.
    # Idiom fix: `spec not in goal_names` instead of `not (spec in goal_names)`.
    if spec not in goal_names:
        return True  # Definitely not a (known) goal.
    if has_double_dash:
        # This means that we're parsing the half of the expression before a --, so assume it's a
        # goal without warning.
        return False
    # Here, it's possible we have a goal and target with the same name. For now, always give
    # priority to the goal, but give a warning if they might have meant the target (if the BUILD
    # file exists).
    try:
        BuildFile.from_cache(get_buildroot(), spec)
        msg = (' Command-line argument "{spec}" is ambiguous, and was assumed to be a goal.'
               ' If this is incorrect, disambiguate it with the "--" argument to separate goals'
               ' from targets.')
        logger.warning(msg.format(spec=spec))
    except IOError:
        pass  # Awesome, it's unambiguous.
    return False
def test_directory_called_build_skipped(self):
    # Ensure the buildfiles found do not include grandparent/BUILD since it is a dir.
    found = BuildFile.scan_buildfiles(os.path.join(BuildFileTest.root_dir, 'grandparent'))
    expected_relpaths = [
        'grandparent/parent/BUILD',
        'grandparent/parent/BUILD.twitter',
        'grandparent/parent/child1/BUILD',
        'grandparent/parent/child1/BUILD.twitter',
        'grandparent/parent/child2/child3/BUILD',
    ]
    self.assertEquals(OrderedSet(BuildFileTest.buildfile(rp) for rp in expected_relpaths), found)
def any_is_declaring_file(address, file_paths):
    """True when any of `file_paths` could be the BUILD file declaring `address`."""
    try:
        # A precise check for BuildFileAddress
        return address.rel_path in file_paths
    except AttributeError:
        pass

    # NB: this will cause any BUILD file, whether it contains the address declaration or not to be
    # considered the one that declared it. That's ok though, because the spec path should be enough
    # information for debugging most of the time.
    for candidate in file_paths:
        if not BuildFile._is_buildfile_name(os.path.basename(candidate)):
            continue
        if address.spec_path == os.path.dirname(candidate):
            return True
    return False
def _parse_addresses(self, spec):
    """Yield the addresses matched by `spec` ('dir::' recursive, 'dir:' one BUILD
    file, or a single 'dir:name' address)."""
    if spec.endswith('::'):
        # Renamed local from `dir` to avoid shadowing the builtin.
        spec_dir = self._get_dir(spec[:-len('::')])
        for buildfile in BuildFile.scan_buildfiles(self._root_dir,
                                                   os.path.join(self._root_dir, spec_dir)):
            for address in Target.get_all_addresses(buildfile):
                yield address
    elif spec.endswith(':'):
        spec_dir = self._get_dir(spec[:-len(':')])
        for address in Target.get_all_addresses(BuildFile(self._root_dir, spec_dir)):
            yield address
    else:
        yield Address.parse(self._root_dir, spec)
def address_map_from_spec_path(self, spec_path):
    """Build one merged {address: addressable} map for the BUILD file family at `spec_path`.

    :raises BuildFileScanError: when the BUILD file itself cannot be loaded.
    """
    try:
        family_root = BuildFile.from_cache(self._root_dir, spec_path)
    except BuildFile.BuildFileError as e:
        raise self.BuildFileScanError("{message}\n searching {spec_path}"
                                      .format(message=e, spec_path=spec_path))
    merged = {}
    for sibling_map in self.parse_build_file_family(family_root).values():
        merged.update(sibling_map)
    return merged
def __init__(self, run_tracker, root_dir, parser, args, build_file_parser, address_mapper,
             build_graph, needs_old_options=True):
    """Create the command, bootstrapping BUILD files and parsing `args`.

    run_tracker: The (already opened) RunTracker to track this run with
    root_dir: The root directory of the pants workspace
    parser: an OptionParser
    args: the subcommand arguments to parse"""
    self.run_tracker = run_tracker
    self.root_dir = root_dir
    self.build_file_parser = build_file_parser
    self.address_mapper = address_mapper
    self.build_graph = build_graph

    config = Config.from_cache()

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
        # construct base parameters to be filled in for BuildGraph
        for path in config.getlist('goals', 'bootstrap_buildfiles', default=[]):
            build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
            # TODO(pl): This is an unfortunate interface leak, but I don't think
            # in the long run that we should be relying on "bootstrap" BUILD files
            # that do nothing except modify global state. That type of behavior
            # (e.g. source roots, goal registration) should instead happen in
            # project plugins, or specialized configuration files.
            self.build_file_parser.parse_build_file_family(build_file)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    # Override the OptionParser's error with more useful output
    def error(message=None, show_help=True):
        if message:
            print(message + '\n')
        if show_help:
            parser.print_help()
        parser.exit(status=1)
    parser.error = error
    self.error = error

    self.register_options()
    self.setup_parser(parser, args)
    if needs_old_options:
        self.old_options, self.args = parser.parse_args(args)
    else:
        # Ensure a predictable error if anything under goal tries to use these.
        self.old_options = None
        self.args = None
def resolve(self, address):
    """Maps an address in the virtual address space to an object.

    :param Address address: the address to lookup in a BUILD file
    :raises AddressLookupError: if the path to the address is not found.
    :returns: Addressable from a build file specified by address
    """
    address_map = self.address_map_from_spec_path(address.spec_path)
    if address in address_map:
        return address_map[address]
    # Unknown address: surface a helpful error naming nearby targets.
    build_file = BuildFile.from_cache(self.root_dir, address.spec_path, must_exist=False)
    self._raise_incorrect_address_error(build_file, address.target_name, address_map)
def _addresses(self):
    """Yield root targets' addresses, or every address in the workspace when no roots given."""
    if self.context.target_roots:
        for root_target in self.context.target_roots:
            yield root_target.address
        return

    parser = self.context.build_file_parser
    graph = self.context.build_graph
    # Parse and inject the whole workspace, then walk the populated graph.
    for build_file in BuildFile.scan_buildfiles(get_buildroot()):
        parser.parse_build_file(build_file)
        for address in parser.addresses_by_build_file[build_file]:
            parser.inject_spec_closure_into_build_graph(address.spec, graph)
    for known_target in graph._target_by_address.values():
        yield known_target.address
def test_exception_class_hierarchy(self):
    """Exception handling code depends on the fact that all exceptions from BuildFile
    are subclassed from the BuildFileError base class."""
    sample_error = BuildFile.MissingBuildFileError()
    self.assertIsInstance(sample_error, BuildFile.BuildFileError)
def console_output(self, _):
    """Yield the build-file specs of targets that depend on the target roots.

    With --closed the roots' own specs are yielded too.  When dependee types are
    configured, scanning is limited to BUILD files under those types' source roots.
    """
    buildfiles = OrderedSet()
    if self._dependees_type:
        base_paths = OrderedSet()
        for dependees_type in self._dependees_type:
            # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
            # machinery
            try:
                # Try to do a fully qualified import 1st for filtering on custom types.
                from_list, module, type_name = dependees_type.rsplit('.', 2)
                module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
                target_type = getattr(module, type_name)
            except (ImportError, ValueError):
                # Fall back on pants provided target types.
                if dependees_type not in pants.base.build_file_aliases.target_aliases:
                    raise TaskError('Invalid type name: %s' % dependees_type)
                target_type = pants.base.build_file_aliases.target_aliases[dependees_type]
            # Find the SourceRoot for the given input type
            base_paths.update(SourceRoot.roots(target_type))
        if not base_paths:
            raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                            '\nPlease define a source root in BUILD file as:' +
                            '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
        for base_path in base_paths:
            buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
    else:
        buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser

    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
        build_file_parser.parse_build_file(build_file)
        # Inject every closure before resolving targets so the graph is complete.
        for address in build_file_parser.addresses_by_build_file[build_file]:
            build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
        for address in build_file_parser.addresses_by_build_file[build_file]:
            target = build_graph.get_target(address)
            # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
            # user vs. targets created by pants at runtime.
            target = self.get_concrete_target(target)
            for dependency in target.dependencies:
                dependency = self.get_concrete_target(dependency)
                dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        for root in roots:
            yield root.address.build_file_spec
    for dependant in self.get_dependants(dependees_by_target, roots):
        yield dependant.address.build_file_spec
def test_raises_execute_error(self):
    """A BUILD file calling an unknown alias surfaces ExecuteError on parse."""
    self.add_to_build_file('BUILD', 'undefined_alias(name="baz")')
    bad_file = BuildFile(FileSystemProjectTree(self.build_root), 'BUILD')
    with self.assertRaises(BuildFileParser.ExecuteError):
        self.build_file_parser.parse_build_file(bad_file)
def test_exposed_object(self):
    """A BUILD file that merely references an exposed object defines no addresses."""
    self.add_to_build_file('BUILD', 'fake_object')
    parsed = self.build_file_parser.parse_build_file(BuildFile(self.build_root, 'BUILD'))
    self.assertEqual(0, len(parsed))
def test_noop_parse(self):
    """Parsing an empty BUILD file yields no addresses."""
    self.add_to_build_file('BUILD', '')
    # NOTE(review): the BuildFile is constructed with relpath '' (the build root itself),
    # not 'BUILD' -- presumably BuildFile resolves the BUILD file within a directory; confirm.
    empty_build_file = BuildFile(self.build_root, '')
    parsed_addresses = set(self.build_file_parser.parse_build_file(empty_build_file))
    self.assertEqual(0, len(parsed_addresses))
def test_exposed_object(self):
    """A BUILD file that merely references an exposed object defines no addresses."""
    self.add_to_build_file('BUILD', 'fake_object')
    tree = FileSystemProjectTree(self.build_root)
    parsed = self.build_file_parser.parse_build_file(BuildFile(tree, 'BUILD'))
    self.assertEqual(0, len(parsed))
def __init__(self, *args, **kwargs):
    """Parse leading command-line target specs into at most one PythonBinary plus other targets.

    Sets up the Python interpreter cache, requires exactly one matching interpreter,
    collects --extra-requirements, then consumes args from ``self.args`` until the first
    non-target arg (or a literal '--'); the remaining args are left as build args.
    """
    super(Py, self).__init__(*args, **kwargs)
    self.binary = None
    self.targets = []
    self.extra_requirements = []
    self.config = Config.load()
    # Empty byte-string filter matches any interpreter.
    interpreters = self.options.interpreters or [b'']
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup(filters=interpreters)
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(interpreters)))
    if len(interpreters) != 1:
        self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]
    for req in self.options.extra_requirements:
        self.extra_requirements.append(PythonRequirement(req, use_2to3=True))
    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    for k in range(len(self.args)):
        arg = self.args.pop(0)
        if arg == '--':
            break

        # Closure over the current `arg`: logs and pushes the arg back for the build-args phase.
        def not_a_target(debug_msg):
            self.debug('Not a target, assuming option: %s.' % debug_msg)
            # We failed to parse the arg as a target or else it was in valid address format but did not
            # correspond to a real target. Assume this is the 1st of the build args and terminate
            # processing args for target addresses.
            self.args.insert(0, arg)

        try:
            # NOTE(review): stray debug print to stderr -- looks like leftover debugging; remove?
            print(self.root_dir, arg, file=sys.stderr)
            self.build_graph.inject_spec_closure(arg)
            spec_path, target_name = parse_spec(arg)
            build_file = BuildFile.from_cache(self.root_dir, spec_path)
            address = BuildFileAddress(build_file, target_name)
            target = self.build_graph.get_target(address)
            if target is None:
                not_a_target(debug_msg='Unrecognized target')
                break
        except Exception as e:
            not_a_target(debug_msg=e)
            break
        if isinstance(target, PythonBinary):
            if self.binary:
                # At most one binary target may be named on the command line.
                self.error('Can only process 1 binary target. Found %s and %s.' % (self.binary, target))
            else:
                self.binary = target
        self.targets.append(target)
    if not self.targets:
        self.error('No valid targets specified!')
def configure_target(target):
    """Recursively register a target's sources and resources with the IDE project model.

    Side effects on the enclosing scope: adds to ``analyzed_targets``, updates
    ``self.has_scala`` / ``self.has_tests`` / ``self.resource_extensions`` and registers
    source sets via ``configure_source_sets``.

    Returns the list of "sibling" targets (targets from other BUILD files whose sources
    live in this target's source base dirs), ``[]`` for non-BuildFileAddress targets, or
    implicitly None when the target was already analyzed.
    """
    if target not in analyzed_targets:
        analyzed_targets.add(target)
        self.has_scala = not self.skip_scala and (self.has_scala or is_scala(target))
        # Hack for java_sources and Eclipse/IntelliJ: add java_sources to project
        if isinstance(target, ScalaLibrary):
            for java_source in target.java_sources:
                configure_target(java_source)
        # Resources are already in the target set
        if target.has_resources:
            # Group resources by their base dir so each base becomes one source set.
            resources_by_basedir = defaultdict(set)
            for resources in target.resources:
                analyzed_targets.add(resources)
                resources_by_basedir[resources.target_base].update(relative_sources(resources))
            for basedir, resources in resources_by_basedir.items():
                self.resource_extensions.update(Project.extract_resource_extensions(resources))
                configure_source_sets(basedir, resources, is_test=target.is_test,
                                      resources_only=True)
        if target.has_sources():
            test = target.is_test
            self.has_tests = self.has_tests or test
            base = target.target_base
            configure_source_sets(base, relative_sources(target), is_test=test,
                                  resources_only=isinstance(target, Resources))
        # TODO(Garrett Malmquist): This is dead code, and should be redone/reintegrated.
        # Other BUILD files may specify sources in the same directory as this target. Those BUILD
        # files might be in parent directories (globs('a/b/*.java')) or even children directories if
        # this target globs children as well. Gather all these candidate BUILD files to test for
        # sources they own that live in the directories this targets sources live in.
        target_dirset = find_source_basedirs(target)
        if not isinstance(target.address, BuildFileAddress):
            return []  # Siblings only make sense for BUILD files.
        candidates = OrderedSet()
        build_file = target.address.build_file
        dir_relpath = os.path.dirname(build_file.relpath)
        # Addresses in this directory and all descendant BUILD files are candidates.
        for descendant in BuildFile.scan_build_files(build_file.project_tree, dir_relpath,
                                                     build_ignore_patterns=self.build_ignore_patterns):
            candidates.update(self.target_util.get_all_addresses(descendant))
        # Plus addresses from ancestor BUILD files up to the build root.
        if not self._is_root_relpath(dir_relpath):
            ancestors = self._collect_ancestor_build_files(build_file.project_tree,
                                                           os.path.dirname(dir_relpath),
                                                           self.build_ignore_patterns)
            for ancestor in ancestors:
                candidates.update(self.target_util.get_all_addresses(ancestor))

        def is_sibling(target):
            # A sibling is a source-bearing target whose source dirs overlap ours.
            return source_target(target) and target_dirset.intersection(find_source_basedirs(target))

        return filter(is_sibling, [self.target_util.get(a) for a in candidates
                                   if a != target.address])
def _create_mock_build_file(self, dirname):
    """Yield a BuildFile for an empty ``<tmp>/<dirname>/BUILD`` inside a temporary root.

    The temporary directory lives only as long as the generator is suspended at the yield.
    """
    with temporary_dir() as root:
        build_dir = os.path.join(root, dirname)
        os.mkdir(build_dir)
        relpath = os.path.join(dirname, 'BUILD')
        touch(os.path.join(root, relpath))
        yield BuildFile(FileSystemProjectTree(root), relpath)
def scan_build_files(self, base_path):
    """Scan for BUILD files under ``base_path`` using this mapper's tree and ignore patterns."""
    ignore_patterns = self._build_ignore_patterns
    return BuildFile.scan_build_files(self._project_tree, base_path,
                                      build_ignore_patterns=ignore_patterns)
def test_invalid_root_dir_error(self):
    """A BuildFile whose project tree is rooted outside the real build root is rejected."""
    self.touch("BUILD")
    # "tmp" is a relative path that is not the test build root, so resolving
    # "grandparent/BUILD" against it must raise.  (Left inside the assertRaises block:
    # whether FileSystemProjectTree.__init__ or BuildFile.__init__ raises is not visible here.)
    with self.assertRaises(ProjectTree.InvalidBuildRootError):
        BuildFile(FileSystemProjectTree("tmp"), "grandparent/BUILD")
def tearDown(self):
    """Reset global test state: build root, source roots, on-disk tree and BuildFile cache.

    These resets touch process-wide singletons; the existing order is preserved
    deliberately (the build-root dir is removed before the BuildFile cache is cleared).
    """
    BuildRoot().reset()
    SourceRoot.reset()
    safe_rmtree(self.build_root)
    BuildFile.clear_cache()
def setup(self):
    """Bootstrap the command: options, plugins/backends, run tracker, parsers and build graph.

    Order matters throughout: config must be cached before plugins load, plugins must load
    before scopes are gathered, scopes before full options, and the parser/graph must exist
    before the bootstrap BUILD files are parsed.
    """
    options_bootstrapper = OptionsBootstrapper()
    # Force config into the cache so we (and plugin/backend loading code) can use it.
    # TODO: Plumb options in explicitly.
    options_bootstrapper.get_bootstrap_options()
    self.config = Config.from_cache()
    # Load plugins and backends.
    backend_packages = self.config.getlist('backends', 'packages', [])
    plugins = self.config.getlist('backends', 'plugins', [])
    build_configuration = load_plugins_and_backends(plugins, backend_packages)
    # Now that plugins and backends are loaded, we can gather the known scopes.
    self.targets = []
    known_scopes = ['']
    for goal in Goal.all():
        # Note that enclosing scopes will appear before scopes they enclose.
        known_scopes.extend(filter(None, goal.known_scopes()))
    # Now that we have the known scopes we can get the full options.
    self.new_options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
    self.register_options()
    self.run_tracker = RunTracker.from_config(self.config)
    report = initial_reporting(self.config, self.run_tracker)
    self.run_tracker.start(report)
    url = self.run_tracker.run_info.get_info('report_url')
    if url:
        self.run_tracker.log(Report.INFO, 'See a report at: %s' % url)
    else:
        self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')
    self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                             root_dir=self.root_dir,
                                             run_tracker=self.run_tracker)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                  address_mapper=self.address_mapper)
    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
        # construct base parameters to be filled in for BuildGraph
        for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
            build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
            # TODO(pl): This is an unfortunate interface leak, but I don't think
            # in the long run that we should be relying on "bootstrap" BUILD files
            # that do nothing except modify global state.  That type of behavior
            # (e.g. source roots, goal registration) should instead happen in
            # project plugins, or specialized configuration files.
            self.build_file_parser.parse_build_file_family(build_file)
    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()
    self._expand_goals_and_specs()
def set_build_file_contents(self, content):
    """Add ``content`` to the root BUILD file and return a BuildFile handle for it."""
    relpath = 'BUILD'
    self.add_to_build_file(relpath, content)
    return BuildFile(self.project_tree, relpath)
def scan_build_files(self, base_path, spec_excludes=None):
    """Scan for BUILD files under ``base_path``.

    :param base_path: Directory (relative path) to scan under.
    :param spec_excludes: Deprecated; warns when passed.  Use the ``build_ignore_patterns``
      constructor parameter instead.
    """
    # Fix: the deprecation message previously misspelled 'constructor' as 'consturctor'.
    deprecated_conditional(lambda: spec_excludes is not None, '0.0.75',
                           'Use build_ignore_patterns constructor parameter instead.')
    return BuildFile.scan_build_files(self._project_tree, base_path, spec_excludes,
                                      build_ignore_patterns=self._build_ignore_patterns)
def test_raises_parse_error(self):
    """Malformed python in a BUILD file surfaces as BuildFileParser.ParseError.

    Exercises the error-context printing corner cases: syntax error at the beginning,
    end, and middle of a file, and in a very short file.
    """
    self.add_to_build_file("BUILD", 'foo(name = = "baz")')
    build_file = BuildFile(FileSystemProjectTree(self.build_root), "BUILD")
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Test some corner cases for the context printing

    # Error at beginning of BUILD file
    build_file = self.add_to_build_file(
        "begin/BUILD",
        dedent(
            """
            *?&INVALID! = 'foo'
            target(
              name='bar',
              dependencies= [
                ':baz',
              ],
            )
            """
        ),
    )
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error at end of BUILD file
    build_file = self.add_to_build_file(
        "end/BUILD",
        dedent(
            """
            target(
              name='bar',
              dependencies= [
                ':baz',
              ],
            )
            *?&INVALID! = 'foo'
            """
        ),
    )
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error in the middle of BUILD file > 6 lines
    build_file = self.add_to_build_file(
        "middle/BUILD",
        dedent(
            """
            target(
              name='bar',
              *?&INVALID! = 'foo'
              dependencies = [
                ':baz',
              ],
            )
            """
        ),
    )
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error in very short build file.
    build_file = self.add_to_build_file(
        "short/BUILD",
        dedent(
            """
            target(name='bar', dependencies = [':baz'],)
            *?&INVALID! = 'foo'
            """
        ),
    )
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)
def get_build_files_family(self, relpath, build_ignore_patterns=None):
    """Return the BUILD file family at ``relpath``, merging caller and mapper ignore patterns."""
    ignore_spec = self._create_ignore_spec(build_ignore_patterns)
    return BuildFile.get_build_files_family(self._project_tree, relpath,
                                            build_ignore_patterns=ignore_spec)
def create_buildfile(self, relpath):
    """Construct a BuildFile for ``relpath`` against this object's project tree."""
    tree = self._project_tree
    return BuildFile(tree, relpath)
def test_raises_parse_error(self):
    """Malformed python in a BUILD file surfaces as BuildFileParser.ParseError.

    Exercises the error-context printing corner cases: syntax error at the beginning,
    end, and middle of a file, and in a very short file.
    """
    self.add_to_build_file('BUILD', 'foo(name = = "baz")')
    build_file = BuildFile(self.build_root, 'BUILD')
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Test some corner cases for the context printing

    # Error at beginning of BUILD file
    build_file = self.add_to_build_file(
        'begin/BUILD',
        dedent('''
          *?&INVALID! = 'foo'
          target(
            name='bar',
            dependencies= [
              ':baz',
            ],
          )
        '''))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error at end of BUILD file
    build_file = self.add_to_build_file(
        'end/BUILD',
        dedent('''
          target(
            name='bar',
            dependencies= [
              ':baz',
            ],
          )
          *?&INVALID! = 'foo'
        '''))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error in the middle of BUILD file > 6 lines
    build_file = self.add_to_build_file(
        'middle/BUILD',
        dedent('''
          target(
            name='bar',
            *?&INVALID! = 'foo'
            dependencies = [
              ':baz',
            ],
          )
        '''))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)

    # Error in very short build file.
    build_file = self.add_to_build_file(
        'short/BUILD',
        dedent('''
          target(name='bar', dependencies = [':baz'],)
          *?&INVALID! = 'foo'
        '''))
    with self.assertRaises(BuildFileParser.ParseError):
        self.build_file_parser.parse_build_file(build_file)
def scan_buildfiles(self, base_relpath, build_ignore_patterns=None):
    """Scan for BUILD files under ``base_relpath``, merging caller and mapper ignore patterns."""
    ignore_spec = self._create_ignore_spec(build_ignore_patterns)
    return BuildFile.scan_build_files(self._project_tree, base_relpath,
                                      build_ignore_patterns=ignore_spec)
def __init__(self, run_tracker, root_dir, parser, argv):
    """Parse leading command-line target specs into one optional PythonBinary plus extra targets.

    Sets up the Python interpreter cache, requires exactly one matching interpreter,
    collects --extra-requirements, then consumes args from ``self.args`` until the first
    non-target arg (or a literal '--'); the remaining args are left as build args.

    Fixes over the previous revision:
    - ``not_a_target`` formatted its message with ``e``, a name only bound inside the
      ``except`` clause (and unbound after it under PEP 3110), so the
      ``target is None`` path raised NameError instead of logging; it now uses its
      ``debug_msg`` parameter.
    - Removed a leftover debug ``print`` and a commented-out ``pdb`` line.
    """
    Command.__init__(self, run_tracker, root_dir, parser, argv)
    self.target = None
    self.extra_targets = []
    self.extra_requirements = []
    self.config = Config.load()
    # Empty byte-string filter matches any interpreter.
    interpreters = self.options.interpreters or [b'']
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup(filters=interpreters)
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(interpreters)))
    if len(interpreters) != 1:
        self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]
    for req in self.options.extra_requirements:
        self.extra_requirements.append(PythonRequirement(req, use_2to3=True))
    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    binaries = []
    for k in range(len(self.args)):
        arg = self.args.pop(0)
        if arg == '--':
            break

        # Closure over the current `arg`: logs and pushes the arg back for the build-args phase.
        def not_a_target(debug_msg):
            self.debug('Not a target, assuming option: %s.' % debug_msg)
            # We failed to parse the arg as a target or else it was in valid address format but did
            # not correspond to a real target. Assume this is the 1st of the build args and
            # terminate processing args for target addresses.
            self.args.insert(0, arg)

        target = None
        try:
            self.build_file_parser.inject_spec_closure_into_build_graph(arg, self.build_graph)
            spec_path, target_name = parse_spec(arg)
            build_file = BuildFile(root_dir, spec_path)
            address = BuildFileAddress(build_file, target_name)
            target = self.build_graph.get_target(address)
            if target is None:
                not_a_target(debug_msg='Unrecognized target')
                break
        except Exception as e:
            not_a_target(debug_msg=e)
            break
        if isinstance(target, PythonBinary):
            binaries.append(target)
        else:
            self.extra_targets.append(target)
    if len(binaries) == 0:
        # treat as a chroot
        pass
    elif len(binaries) == 1:
        # We found a binary and are done, the rest of the args get passed to it
        self.target = binaries[0]
    else:
        self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
            arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries)
        ))
    if self.target is None:
        if not self.extra_targets:
            self.error('No valid target specified!')
        self.target = self.extra_targets.pop(0)
def create_buildfile(self, path):
    """Construct a BuildFile for ``path`` rooted at this test's build root."""
    project_tree = FileSystemProjectTree(self.build_root)
    return BuildFile(project_tree, path)