def test_noop_parse(self):
  """An empty BUILD file must parse cleanly without registering any target proxies."""
  with self.workspace('BUILD') as root_dir:
    build_file = BuildFile(root_dir, '')
    parser = BuildFileParser(root_dir=root_dir)
    parser.parse_build_file(build_file)
    proxies = set(parser._target_proxy_by_address.values())
    self.assertEqual(0, len(proxies))
def test_file_have_coding_utf8(self):
  """Look through all .py files and ensure they start with the line '# coding=utf-8'."""
  build_file_parser = BuildFileParser(get_buildroot())
  load_backends_from_source(build_file_parser)

  def has_hand_coded_python_files(tgt):
    # Only hand-written, original python targets are held to the coding rule.
    return (not tgt.is_synthetic) and tgt.is_original and tgt.has_sources('.py')

  nonconforming_files = []
  for target in build_file_parser.scan().targets(has_hand_coded_python_files):
    for src in target.sources_relative_to_buildroot():
      with open(os.path.join(get_buildroot(), src), 'r') as python_file:
        coding_line = python_file.readline()
        # Empty __init__.py files are exempt.
        if '' == coding_line and os.path.basename(src) == '__init__.py':
          continue
        if coding_line[0:2] == '#!':
          # Executable file: look for the coding on the second line.
          coding_line = python_file.readline()
        if coding_line.rstrip() != '# coding=utf-8':
          nonconforming_files.append(src)
  if nonconforming_files:
    # BUG FIX: the failure message previously said '# coding=utf8', which does not
    # match the '# coding=utf-8' line the check above actually requires.
    self.fail('Expected these files to contain first line "# coding=utf-8": '
              + str(nonconforming_files))
def test_transitive_closure_spec(self):
  """Injecting a spec closure must pull in the whole transitive dependency chain."""
  with self.workspace('./BUILD', 'a/BUILD', 'a/b/BUILD') as root_dir:
    contents = {
      './BUILD': dedent('''
        fake(name="foo",
             dependencies=[
               'a',
             ])
        '''),
      'a/BUILD': dedent('''
        fake(name="a",
             dependencies=[
               'a/b:bat',
             ])
        '''),
      'a/b/BUILD': dedent('''
        fake(name="bat")
        '''),
    }
    for relpath, content in contents.items():
      with open(os.path.join(root_dir, relpath), 'w') as build:
        build.write(content)

    build_configuration = BuildConfiguration()
    build_configuration.register_target_alias('fake', Target)
    parser = BuildFileParser(build_configuration, root_dir=root_dir)
    build_graph = BuildGraph(self.address_mapper)
    parser.inject_spec_closure_into_build_graph(':foo', build_graph)
    self.assertEqual(1, len(build_graph.dependencies_of(SyntheticAddress.parse(':foo'))))
def test_transitive_closure_spec(self):
  """Spec closure injection follows dependencies across BUILD files (legacy parser API)."""
  class FakeTarget(Target):
    def __init__(self, *args, **kwargs):
      super(FakeTarget, self).__init__(*args, payload=None, **kwargs)

  with self.workspace('./BUILD', 'a/BUILD', 'a/b/BUILD') as root_dir:
    def write_build(relpath, content):
      # Helper: materialize one BUILD file under the scratch root.
      with open(os.path.join(root_dir, relpath), 'w') as build:
        build.write(dedent(content))

    write_build('./BUILD', '''
      fake(name="foo",
           dependencies=[
             'a',
           ])
      ''')
    write_build('a/BUILD', '''
      fake(name="a",
           dependencies=[
             'a/b:bat',
           ])
      ''')
    write_build('a/b/BUILD', '''
      fake(name="bat")
      ''')

    parser = BuildFileParser(root_dir=root_dir,
                             exposed_objects={},
                             path_relative_utils={},
                             target_alias_map={'fake': FakeTarget})
    build_graph = BuildGraph()
    parser.inject_spec_closure_into_build_graph(':foo', build_graph)
    self.assertEqual(1, len(build_graph.dependencies_of(SyntheticAddress(':foo'))))
def test_transitive_closure_spec(self):
  """Injecting ':foo' must transitively resolve foo -> a -> a/b:bat."""
  with self.workspace('./BUILD', 'a/BUILD', 'a/b/BUILD') as root_dir:
    build_files = [
      ('./BUILD', '''
        fake(name="foo",
             dependencies=[
               'a',
             ])
        '''),
      ('a/BUILD', '''
        fake(name="a",
             dependencies=[
               'a/b:bat',
             ])
        '''),
      ('a/b/BUILD', '''
        fake(name="bat")
        '''),
    ]
    for relpath, content in build_files:
      with open(os.path.join(root_dir, relpath), 'w') as build:
        build.write(dedent(content))

    build_configuration = BuildConfiguration()
    build_configuration.register_target_alias('fake', Target)
    parser = BuildFileParser(build_configuration, root_dir=root_dir)
    build_graph = BuildGraph(self.address_mapper)

    parser.inject_spec_closure_into_build_graph(':foo', build_graph)
    foo_deps = build_graph.dependencies_of(SyntheticAddress.parse(':foo'))
    self.assertEqual(1, len(foo_deps))
def setUp(self):
  """Point BuildRoot at a scratch dir with an empty pants.ini, parser, and graph."""
  # Remember the real build root so it can be restored after the test.
  self.real_build_root = BuildRoot().path
  scratch_root = mkdtemp(suffix='_BUILD_ROOT')
  self.build_root = scratch_root
  BuildRoot().path = scratch_root
  # An empty pants.ini must exist for buildroot discovery.
  self.create_file('pants.ini')
  parser = BuildFileParser(scratch_root)
  parser.register_alias_groups(self.alias_groups)
  self.build_file_parser = parser
  self.build_graph = BuildGraph()
def test_build_file_parser_error_hierarcy(self):
  """Exception handling code depends on the fact that all explicit exceptions from
  BuildFileParser are subclassed from the BuildFileParserError base class.
  """
  # NOTE(review): the method name's 'hierarcy' typo is preserved; renaming a test
  # method is an interface change for anything selecting tests by name.
  concrete_errors = (
    BuildFileParser.BuildFileScanError,
    BuildFileParser.AddressableConflictException,
    BuildFileParser.SiblingConflictException,
    BuildFileParser.ParseError,
    BuildFileParser.ExecuteError,
  )
  for error_type in concrete_errors:
    self.assertIsInstance(error_type(), BuildFileParser.BuildFileParserError)
def setup(self): options_bootstrapper = OptionsBootstrapper() # Force config into the cache so we (and plugin/backend loading code) can use it. # TODO: Plumb options in explicitly. options_bootstrapper.get_bootstrap_options() self.config = Config.from_cache() # Add any extra paths to python path (eg for loading extra source backends) extra_paths = self.config.getlist('backends', 'python-path', []) if extra_paths: sys.path.extend(extra_paths) # Load plugins and backends. backend_packages = self.config.getlist('backends', 'packages', []) plugins = self.config.getlist('backends', 'plugins', []) build_configuration = load_plugins_and_backends(plugins, backend_packages) # Now that plugins and backends are loaded, we can gather the known scopes. self.targets = [] known_scopes = [''] for goal in Goal.all(): # Note that enclosing scopes will appear before scopes they enclose. known_scopes.extend(filter(None, goal.known_scopes())) # Now that we have the known scopes we can get the full options. self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes) self.register_options() self.run_tracker = RunTracker.from_config(self.config) report = initial_reporting(self.config, self.run_tracker) self.run_tracker.start(report) url = self.run_tracker.run_info.get_info('report_url') if url: self.run_tracker.log(Report.INFO, 'See a report at: %s' % url) else: self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)') self.build_file_parser = BuildFileParser(build_configuration=build_configuration, root_dir=self.root_dir, run_tracker=self.run_tracker) self.address_mapper = BuildFileAddressMapper(self.build_file_parser) self.build_graph = BuildGraph(run_tracker=self.run_tracker, address_mapper=self.address_mapper) with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]): # construct base parameters to be filled in for BuildGraph for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]): build_file = 
BuildFile.from_cache(root_dir=self.root_dir, relpath=path) # TODO(pl): This is an unfortunate interface leak, but I don't think # in the long run that we should be relying on "bootstrap" BUILD files # that do nothing except modify global state. That type of behavior # (e.g. source roots, goal registration) should instead happen in # project plugins, or specialized configuration files. self.build_file_parser.parse_build_file_family(build_file) # Now that we've parsed the bootstrap BUILD files, and know about the SCM system. self.run_tracker.run_info.add_scm_info() self._expand_goals_and_specs()
def setUp(self):
  """Provision an isolated build root, default global options, and parsing machinery."""
  super(BaseTest, self).setUp()
  Goal.clear()
  Subsystem.reset()

  # Remember the real build root so cleanup can restore it.
  self.real_build_root = BuildRoot().path

  self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
  self.addCleanup(safe_rmtree, self.build_root)

  self.pants_workdir = os.path.join(self.build_root, '.pants.d')
  safe_mkdir(self.pants_workdir)

  self.options = defaultdict(dict)  # scope -> key-value mapping.
  global_scope_options = {
    'pants_workdir': self.pants_workdir,
    'pants_supportdir': os.path.join(self.build_root, 'build-support'),
    'pants_distdir': os.path.join(self.build_root, 'dist'),
    'pants_configdir': os.path.join(self.build_root, 'config'),
    'cache_key_gen_version': '0-test',
  }
  self.options[''] = global_scope_options

  BuildRoot().path = self.build_root
  self.addCleanup(BuildRoot().reset)

  # We need a pants.ini, even if empty. get_buildroot() uses its presence.
  self.create_file('pants.ini')

  self._build_configuration = BuildConfiguration()
  self._build_configuration.register_aliases(self.alias_groups)
  self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root)
  self.address_mapper = BuildFileAddressMapper(self.build_file_parser, FilesystemBuildFile)
  self.build_graph = BuildGraph(address_mapper=self.address_mapper)
def test_invoke_assemble(self):
  """assemble() must run against a BuildFileParser and omit polluting symbols."""
  # We get our doc'able symbols from a BuildFileParser.
  bfp = BuildFileParser('.', '')
  # Invoke that functionality without blowing up:
  syms = assemble(build_file_parser=bfp)
  # These symbols snuck into old dictionaries; make sure they don't again:
  for unexpected in ('__builtins__', 'Target'):
    self.assertTrue(unexpected not in syms.keys(), "Found %s" % unexpected)
def get_syms():
  """Return the registered BUILD-file context symbols, excluding predefined ones.

  :returns: A dict mapping symbol name to its registered object, for every
    symbol reported by ``BuildFileParser.report_registered_context()`` whose
    name is not listed in ``PREDEFS``.
  """
  registered = BuildFileParser.report_registered_context()
  # Dict comprehension replaces the manual skip-and-copy loop; same result.
  return {name: obj for name, obj in registered.items() if name not in PREDEFS}
def make_default_build_file_parser(build_root):
  """Register the default pants aliases on BuildFileParser and return a parser.

  NOTE(review): registration appears to mutate BuildFileParser class-level state.
  """
  from pants.base.build_file_aliases import (
    target_aliases, object_aliases,
    applicative_path_relative_util_aliases, partial_path_relative_util_aliases)

  # Table-driven registration: (alias map, matching registration function).
  registrations = (
    (target_aliases, BuildFileParser.register_target_alias),
    (object_aliases, BuildFileParser.register_exposed_object),
    (applicative_path_relative_util_aliases,
     BuildFileParser.register_applicative_path_relative_util),
    (partial_path_relative_util_aliases,
     BuildFileParser.register_partial_path_relative_util),
  )
  for alias_map, register in registrations:
    for alias, item in alias_map.items():
      register(alias, item)

  return BuildFileParser(root_dir=build_root)
def setUp(self):
  """Stand up a temporary build root with configured aliases and an empty graph."""
  self.real_build_root = BuildRoot().path
  scratch_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
  self.build_root = scratch_root
  BuildRoot().path = scratch_root
  # An empty pants.ini is still required for buildroot discovery.
  self.create_file('pants.ini')
  configuration = BuildConfiguration()
  configuration.register_aliases(self.alias_groups)
  self.build_file_parser = BuildFileParser(configuration, scratch_root)
  self.build_graph = BuildGraph()
def setUp(self):
  """Reset goals and provision a temp build root with parser, mapper, and graph."""
  Goal.clear()
  self.real_build_root = BuildRoot().path
  tmp_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
  self.build_root = tmp_root
  BuildRoot().path = tmp_root
  # An empty pants.ini must exist for buildroot discovery.
  self.create_file('pants.ini')
  configuration = BuildConfiguration()
  configuration.register_aliases(self.alias_groups)
  self.build_file_parser = BuildFileParser(configuration, tmp_root)
  self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
  self.build_graph = BuildGraph(address_mapper=self.address_mapper)
def test_transitive_closure_spec(self):
  """The parser must inject the full dependency closure of ':foo' into the graph."""
  class FakeTarget(Target):
    def __init__(self, *args, **kwargs):
      super(FakeTarget, self).__init__(*args, payload=None, **kwargs)

  build_file_contents = {
    './BUILD': '''
      fake(name="foo",
           dependencies=[
             'a',
           ])
      ''',
    'a/BUILD': '''
      fake(name="a",
           dependencies=[
             'a/b:bat',
           ])
      ''',
    'a/b/BUILD': '''
      fake(name="bat")
      ''',
  }
  with self.workspace('./BUILD', 'a/BUILD', 'a/b/BUILD') as root_dir:
    for relpath, content in build_file_contents.items():
      with open(os.path.join(root_dir, relpath), 'w') as build:
        build.write(dedent(content))

    parser = BuildFileParser(root_dir=root_dir,
                             exposed_objects={},
                             path_relative_utils={},
                             target_alias_map={'fake': FakeTarget})
    graph = BuildGraph()
    parser.inject_spec_closure_into_build_graph(':foo', graph)
    self.assertEqual(1, len(graph.dependencies_of(SyntheticAddress(':foo'))))
def make_default_build_file_parser(build_root):
  """Build a BuildFileParser for build_root with all default aliases registered."""
  from pants.base.build_file_aliases import (target_aliases, object_aliases,
                                             applicative_path_relative_util_aliases,
                                             partial_path_relative_util_aliases)

  # Register each category of default alias on the parser class.
  for name, target_type in target_aliases.items():
    BuildFileParser.register_target_alias(name, target_type)

  for name, exposed in object_aliases.items():
    BuildFileParser.register_exposed_object(name, exposed)

  for name, util in applicative_path_relative_util_aliases.items():
    BuildFileParser.register_applicative_path_relative_util(name, util)

  for name, util in partial_path_relative_util_aliases.items():
    BuildFileParser.register_partial_path_relative_util(name, util)

  return BuildFileParser(root_dir=build_root)
def test_file_have_coding_utf8(self):
  """Look through all .py files and ensure they start with the line '# coding=utf-8'."""
  config = Config.load()
  backend_packages = config.getlist('backends', 'packages')
  build_configuration = load_plugins_and_backends(backends=backend_packages)
  build_file_parser = BuildFileParser(root_dir=get_buildroot(),
                                      build_configuration=build_configuration)
  address_mapper = BuildFileAddressMapper(build_file_parser)
  build_graph = BuildGraph(address_mapper=address_mapper)
  # Populate the graph with everything reachable from the buildroot.
  for address in address_mapper.scan_addresses(
      get_buildroot(), spec_excludes=[config.getdefault('pants_workdir')]):
    build_graph.inject_address_closure(address)

  def has_hand_coded_python_files(tgt):
    # Only hand-written, original python targets are held to the coding rule.
    return (not tgt.is_synthetic) and tgt.is_original and tgt.has_sources('.py')

  nonconforming_files = []
  for target in build_graph.targets(has_hand_coded_python_files):
    for src in target.sources_relative_to_buildroot():
      with open(os.path.join(get_buildroot(), src), 'r') as python_file:
        coding_line = python_file.readline()
        # Empty __init__.py files are exempt.
        if '' == coding_line and os.path.basename(src) == '__init__.py':
          continue
        if coding_line[0:2] == '#!':
          # Executable file: look for the coding on the second line.
          coding_line = python_file.readline()
        if coding_line.rstrip() != '# coding=utf-8':
          nonconforming_files.append(src)
  if nonconforming_files:
    # BUG FIX: the failure message previously said '# coding=utf8', which does not
    # match the '# coding=utf-8' line the check above actually requires.
    self.fail('Expected these files to contain first line "# coding=utf-8": '
              + str(nonconforming_files))
def setUp(self):
  """Create a scratch build root, default new-style options, and build machinery."""
  super(BaseTest, self).setUp()
  Goal.clear()
  self.real_build_root = BuildRoot().path
  scratch = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
  self.build_root = scratch

  self.new_options = defaultdict(dict)  # scope -> key-value mapping.
  self.new_options[''] = {
    'pants_workdir': os.path.join(scratch, '.pants.d'),
    'pants_supportdir': os.path.join(scratch, 'build-support'),
    'pants_distdir': os.path.join(scratch, 'dist')
  }

  BuildRoot().path = scratch
  # An empty pants.ini must exist for buildroot discovery.
  self.create_file('pants.ini')

  configuration = BuildConfiguration()
  configuration.register_aliases(self.alias_groups)
  self.build_file_parser = BuildFileParser(configuration, scratch)
  self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
  self.build_graph = BuildGraph(address_mapper=self.address_mapper)
def __init__(self, root_dir, options, build_config, run_tracker, reporting, exiter=sys.exit):
  """
  :param str root_dir: The root directory of the pants workspace (aka the "build root").
  :param Options options: The global, pre-initialized Options instance.
  :param BuildConfiguration build_config: A pre-initialized BuildConfiguration instance.
  :param Runtracker run_tracker: The global, pre-initialized/running RunTracker instance.
  :param Reporting reporting: The global, pre-initialized Reporting instance.
  :param func exiter: A function that accepts an exit code value and exits (for tests, Optional).
  """
  self._root_dir = root_dir
  self._options = options
  self._build_config = build_config
  self._run_tracker = run_tracker
  self._reporting = reporting
  self._exiter = exiter

  # Accumulators populated later during goal/spec expansion.
  self._goals = []
  self._targets = []

  # Cache frequently-used option values off the pre-parsed Options instance.
  self._requested_goals = self._options.goals
  self._target_specs = self._options.target_specs
  self._help_request = self._options.help_request
  self._global_options = options.for_global_scope()
  self._tag = self._global_options.tag
  self._fail_fast = self._global_options.fail_fast
  self._spec_excludes = self._global_options.spec_excludes
  self._explain = self._global_options.explain
  self._kill_nailguns = self._global_options.kill_nailguns

  # Build-graph plumbing, in dependency order:
  # BUILD file type (per --build-file-rev) -> parser -> address mapper -> graph -> spec parser.
  self._build_file_type = self._get_buildfile_type(self._global_options.build_file_rev)
  self._build_file_parser = BuildFileParser(self._build_config, self._root_dir)
  self._address_mapper = BuildFileAddressMapper(self._build_file_parser, self._build_file_type)
  self._build_graph = BuildGraph(self._address_mapper)
  self._spec_parser = CmdLineSpecParser(
    self._root_dir,
    self._address_mapper,
    spec_excludes=self._spec_excludes,
    exclude_target_regexps=self._global_options.exclude_target_regexp
  )
def _run():
  """Main pants entry point: parse argv, dispatch to a command, run it, and exit.

  To add additional paths to sys.path, add a block to the config similar to the following:
  [main]
  roots: ['src/python/pants_internal/test/',]
  """
  version = get_version()
  if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION:
    _do_exit(version)

  root_dir = get_buildroot()
  if not os.path.exists(root_dir):
    _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir)

  if len(sys.argv) < 2:
    argv = ['goal']
  else:
    argv = sys.argv[1:]
  # Hack to force ./pants -h etc. to redirect to goal.
  if argv[0] != 'goal' and set(['-h', '--help', 'help']).intersection(argv):
    argv = ['goal'] + argv

  parser = optparse.OptionParser(add_help_option=False, version=version)
  RcFile.install_disable_rc_option(parser)
  parser.add_option(_LOG_EXIT_OPTION,
                    action='store_true',
                    default=False,
                    dest='log_exit',
                    help = 'Log an exit message on success or failure.')

  config = Config.load()

  # XXX(wickman) This should be in the command goal, not in pants_exe.py!
  run_tracker = RunTracker.from_config(config)
  report = initial_reporting(config, run_tracker)
  run_tracker.start(report)

  url = run_tracker.run_info.get_info('report_url')
  if url:
    run_tracker.log(Report.INFO, 'See a report at: %s' % url)
  else:
    run_tracker.log(Report.INFO, '(To run a reporting server: ./pants goal server)')

  build_file_parser = BuildFileParser(root_dir=root_dir, run_tracker=run_tracker)
  build_graph = BuildGraph(run_tracker=run_tracker)

  additional_backends = config.getlist('backends', 'packages')
  load_backends_from_source(build_file_parser, additional_backends=additional_backends)

  command_class, command_args = _parse_command(root_dir, argv)
  command = command_class(run_tracker, root_dir, parser, command_args, build_file_parser,
                          build_graph)
  try:
    if command.serialized():
      # Serialized commands hold a file lock so only one pants run mutates state at a time.
      def onwait(pid):
        process = psutil.Process(pid)
        print('Waiting on pants process %d (%s) to complete'
              % (pid, ' '.join(process.cmdline)), file=sys.stderr)
        return True
      runfile = os.path.join(root_dir, '.pants.run')
      lock = Lock.acquire(runfile, onwait=onwait)
    else:
      lock = Lock.unlocked()
    try:
      result = command.run(lock)
      if result:
        run_tracker.set_root_outcome(WorkUnit.FAILURE)
      _do_exit(result)
    except KeyboardInterrupt:
      command.cleanup()
      raise
    finally:
      lock.release()
  finally:
    run_tracker.end()
    # Must kill nailguns only after run_tracker.end() is called, because there may still
    # be pending background work that needs a nailgun.
    if (hasattr(command.options, 'cleanup_nailguns') and command.options.cleanup_nailguns) \
       or config.get('nailgun', 'autokill', default=False):
      NailgunTask.killall(None)
def setup(self):
  """Bootstrap options, plugins/backends, logging, reporting, and the build graph.

  Must run before goals execute.  Populates, in dependency order: self.config,
  self.options, self.run_tracker, self.build_file_parser, self.address_mapper
  and self.build_graph, then parses any bootstrap BUILD files.
  """
  options_bootstrapper = OptionsBootstrapper()

  # Force config into the cache so we (and plugin/backend loading code) can use it.
  # TODO: Plumb options in explicitly.
  bootstrap_options = options_bootstrapper.get_bootstrap_options()
  self.config = Config.from_cache()

  # Add any extra paths to python path (eg for loading extra source backends)
  for path in bootstrap_options.for_global_scope().pythonpath:
    sys.path.append(path)
    pkg_resources.fixup_namespace_packages(path)

  # Load plugins and backends.
  backend_packages = self.config.getlist('backends', 'packages', [])
  plugins = self.config.getlist('backends', 'plugins', [])
  build_configuration = load_plugins_and_backends(plugins, backend_packages)

  # Now that plugins and backends are loaded, we can gather the known scopes.
  self.targets = []
  # TODO: Create a 'Subsystem' abstraction instead of special-casing run-tracker here
  # and in register_options().
  known_scopes = ['', 'run-tracker']
  for goal in Goal.all():
    # Note that enclosing scopes will appear before scopes they enclose.
    known_scopes.extend(filter(None, goal.known_scopes()))

  # Now that we have the known scopes we can get the full options.
  self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
  self.register_options()

  # TODO(Eric Ayers) We are missing log messages. Set the log level earlier
  # Enable standard python logging for code with no handle to a context/work-unit.
  self._setup_logging()  # NB: self.options are needed for this call.

  self.run_tracker = RunTracker.from_options(self.options)
  report = initial_reporting(self.config, self.run_tracker)
  self.run_tracker.start(report)
  url = self.run_tracker.run_info.get_info('report_url')
  if url:
    self.run_tracker.log(Report.INFO, 'See a report at: %s' % url)
  else:
    self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

  # Wire the graph machinery: parser -> address mapper -> build graph.
  self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                           root_dir=self.root_dir,
                                           run_tracker=self.run_tracker)
  self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
  self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                address_mapper=self.address_mapper)

  with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
    # construct base parameters to be filled in for BuildGraph
    for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
      build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
      # TODO(pl): This is an unfortunate interface leak, but I don't think
      # in the long run that we should be relying on "bootstrap" BUILD files
      # that do nothing except modify global state. That type of behavior
      # (e.g. source roots, goal registration) should instead happen in
      # project plugins, or specialized configuration files.
      self.build_file_parser.parse_build_file_family(build_file)

  # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
  self.run_tracker.run_info.add_scm_info()

  self._expand_goals_and_specs()
class GoalRunner(object):
  """Lists installed goals or else executes a named goal."""

  def __init__(self, root_dir):
    """
    :param root_dir: The root directory of the pants workspace.
    """
    self.root_dir = root_dir

  def setup(self):
    """Bootstrap options, plugins/backends, logging, reporting, and the build graph.

    Must run before run().  Populates, in dependency order: self.config,
    self.options, self.run_tracker, self.build_file_parser, self.address_mapper
    and self.build_graph, then parses any bootstrap BUILD files.
    """
    options_bootstrapper = OptionsBootstrapper()

    # Force config into the cache so we (and plugin/backend loading code) can use it.
    # TODO: Plumb options in explicitly.
    bootstrap_options = options_bootstrapper.get_bootstrap_options()
    self.config = Config.from_cache()

    # Add any extra paths to python path (eg for loading extra source backends)
    for path in bootstrap_options.for_global_scope().pythonpath:
      sys.path.append(path)
      pkg_resources.fixup_namespace_packages(path)

    # Load plugins and backends.
    backend_packages = self.config.getlist('backends', 'packages', [])
    plugins = self.config.getlist('backends', 'plugins', [])
    build_configuration = load_plugins_and_backends(plugins, backend_packages)

    # Now that plugins and backends are loaded, we can gather the known scopes.
    self.targets = []
    # TODO: Create a 'Subsystem' abstraction instead of special-casing run-tracker here
    # and in register_options().
    known_scopes = ['', 'run-tracker']
    for goal in Goal.all():
      # Note that enclosing scopes will appear before scopes they enclose.
      known_scopes.extend(filter(None, goal.known_scopes()))

    # Now that we have the known scopes we can get the full options.
    self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
    self.register_options()

    # TODO(Eric Ayers) We are missing log messages. Set the log level earlier
    # Enable standard python logging for code with no handle to a context/work-unit.
    self._setup_logging()  # NB: self.options are needed for this call.

    self.run_tracker = RunTracker.from_options(self.options)
    report = initial_reporting(self.config, self.run_tracker)
    self.run_tracker.start(report)
    url = self.run_tracker.run_info.get_info('report_url')
    if url:
      self.run_tracker.log(Report.INFO, 'See a report at: %s' % url)
    else:
      self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

    # Wire the graph machinery: parser -> address mapper -> build graph.
    self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                             root_dir=self.root_dir,
                                             run_tracker=self.run_tracker)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                  address_mapper=self.address_mapper)

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
      # construct base parameters to be filled in for BuildGraph
      for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
        # TODO(pl): This is an unfortunate interface leak, but I don't think
        # in the long run that we should be relying on "bootstrap" BUILD files
        # that do nothing except modify global state. That type of behavior
        # (e.g. source roots, goal registration) should instead happen in
        # project plugins, or specialized configuration files.
        self.build_file_parser.parse_build_file_family(build_file)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    self._expand_goals_and_specs()

  @property
  def spec_excludes(self):
    # Note: Only call after register_options() has been called.
    return self.options.for_global_scope().spec_excludes

  @property
  def global_options(self):
    # The parsed options for the global ('') scope.
    return self.options.for_global_scope()

  def register_options(self):
    """Register global, run-tracker, and per-goal options on self.options."""
    # Add a 'bootstrap' attribute to the register function, so that register_global can
    # access the bootstrap option values.
    def register_global(*args, **kwargs):
      return self.options.register_global(*args, **kwargs)
    register_global.bootstrap = self.options.bootstrap_option_values()
    register_global_options(register_global)

    # This is the first case we have of non-task, non-global options.
    # The current implementation special-cases RunTracker, and is temporary.
    # In the near future it will be replaced with a 'Subsystem' abstraction.
    # But for now this is useful for kicking the tires.
    def register_run_tracker(*args, **kwargs):
      self.options.register('run-tracker', *args, **kwargs)
    RunTracker.register_options(register_run_tracker)

    for goal in Goal.all():
      goal.register_options(self.options)

  def _expand_goals_and_specs(self):
    """Resolve command-line goals and target specs into self.goals and self.targets."""
    goals = self.options.goals
    specs = self.options.target_specs
    fail_fast = self.options.for_global_scope().fail_fast

    for goal in goals:
      # Warn when an argument is both a goal name and a BUILD file path.
      if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
        logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                       "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

    if self.options.print_help_if_requested():
      sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
      spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                      spec_excludes=self.spec_excludes,
                                      exclude_target_regexps=self.global_options.exclude_target_regexp)
      with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
        # Inject the closure of every parsed address into the graph and record its root target.
        for spec in specs:
          for address in spec_parser.parse_addresses(spec, fail_fast):
            self.build_graph.inject_address_closure(address)
            self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]

  def run(self):
    """Execute the requested goals; returns the engine's result code."""
    def fail():
      self.run_tracker.set_root_outcome(WorkUnit.FAILURE)

    kill_nailguns = self.options.for_global_scope().kill_nailguns
    try:
      result = self._do_run()
      if result:
        fail()
    except KeyboardInterrupt:
      fail()
      # On ctrl-c we always kill nailguns, otherwise they might keep running
      # some heavyweight compilation and gum up the system during a subsequent run.
      kill_nailguns = True
      raise
    except Exception:
      fail()
      raise
    finally:
      self.run_tracker.end()
      # Must kill nailguns only after run_tracker.end() is called, otherwise there may still
      # be pending background work that needs a nailgun.
      if kill_nailguns:
        # TODO: This is JVM-specific and really doesn't belong here.
        # TODO: Make this more selective? Only kill nailguns that affect state?
        # E.g., checkstyle may not need to be killed.
        NailgunTask.killall()
    return result

  def _do_run(self):
    """Set up reporting and the execution Context, then drive the RoundEngine."""
    # Update the reporting settings, now that we have flags etc.
    def is_quiet_task():
      for goal in self.goals:
        if goal.has_task_of_type(QuietTaskMixin):
          return True
      return False

    is_explain = self.global_options.explain
    update_reporting(self.global_options, is_quiet_task() or is_explain, self.run_tracker)

    context = Context(
      config=self.config,
      options=self.options,
      run_tracker=self.run_tracker,
      target_roots=self.targets,
      requested_goals=self.requested_goals,
      build_graph=self.build_graph,
      build_file_parser=self.build_file_parser,
      address_mapper=self.address_mapper,
      spec_excludes=self.spec_excludes
    )

    # Reject goals that have no tasks registered.
    unknown = []
    for goal in self.goals:
      if not goal.ordered_task_names():
        unknown.append(goal)
    if unknown:
      context.log.error('Unknown goal(s): %s\n' % ' '.join(goal.name for goal in unknown))
      return 1

    engine = RoundEngine()
    return engine.execute(context, self.goals)

  def _setup_logging(self):
    """Configure stdlib logging: console handler, plus a rotating file handler if --logdir is set."""
    # TODO(John Sirois): Consider moving to straight python logging. The divide between the
    # context/work-unit logging and standard python logging doesn't buy us anything.

    # TODO(John Sirois): Support logging.config.fileConfig so a site can setup fine-grained
    # logging control and we don't need to be the middleman plumbing an option for each python
    # standard logging knob.

    # NB: quiet help says 'Squelches all console output apart from errors'.
    level = 'ERROR' if self.global_options.quiet else self.global_options.level.upper()

    logging_config = {'version': 1,  # required and there is only a version 1 format so far.
                      'disable_existing_loggers': False}

    formatters_config = {'brief': {'format': '%(levelname)s] %(message)s'}}
    handlers_config = {'console': {'class': 'logging.StreamHandler',
                                   'formatter': 'brief',  # defined above
                                   'level': level}}

    log_dir = self.global_options.logdir
    if log_dir:
      safe_mkdir(log_dir)

      # This is close to but not quite glog format. Namely the leading levelname is not a single
      # character and the fractional second is only to millis precision and not micros.
      glog_date_format = '%m%d %H:%M:%S'
      glog_format = ('%(levelname)s %(asctime)s.%(msecs)d %(process)d %(filename)s:%(lineno)d] '
                     '%(message)s')

      formatters_config['glog'] = {'format': glog_format, 'datefmt': glog_date_format}
      handlers_config['file'] = {'class': 'logging.handlers.RotatingFileHandler',
                                 'formatter': 'glog',  # defined above
                                 'level': level,
                                 'filename': os.path.join(log_dir, 'pants.log'),
                                 'maxBytes': 10 * 1024 * 1024,
                                 'backupCount': 4}

    logging_config['formatters'] = formatters_config
    logging_config['handlers'] = handlers_config
    logging_config['root'] = {'level': level, 'handlers': handlers_config.keys()}

    logging.config.dictConfig(logging_config)
def setup(self):
  """Bootstraps options/logging, loads plugins and backends, and wires up the build graph.

  Ordering here matters: logging first (so backends can log while loading), then
  backend/plugin loading (so all option scopes are known), then full option parsing,
  then subsystem instantiation.
  """
  options_bootstrapper = OptionsBootstrapper()
  bootstrap_options = options_bootstrapper.get_bootstrap_options()

  # Get logging setup prior to loading backends so that they can log as needed.
  self._setup_logging(bootstrap_options.for_global_scope())

  # Add any extra paths to python path (eg for loading extra source backends)
  for path in bootstrap_options.for_global_scope().pythonpath:
    sys.path.append(path)
    pkg_resources.fixup_namespace_packages(path)

  # Load plugins and backends.
  plugins = bootstrap_options.for_global_scope().plugins
  backend_packages = bootstrap_options.for_global_scope().backend_packages
  build_configuration = load_plugins_and_backends(plugins, backend_packages)

  # Now that plugins and backends are loaded, we can gather the known scopes.
  self.targets = []
  known_scope_infos = [ScopeInfo.for_global_scope()]

  # Add scopes for all needed subsystems.
  subsystems = (set(self.subsystems) | Goal.subsystems() | build_configuration.subsystems())
  for subsystem in subsystems:
    known_scope_infos.append(ScopeInfo(subsystem.options_scope, ScopeInfo.GLOBAL_SUBSYSTEM))

  # Add scopes for all tasks in all goals.
  for goal in Goal.all():
    known_scope_infos.extend(filter(None, goal.known_scope_infos()))

  # Now that we have the known scopes we can get the full options.
  self.options = options_bootstrapper.get_full_options(known_scope_infos)
  self.register_options(subsystems)

  # Make the options values available to all subsystems.
  Subsystem._options = self.options

  # Now that we have options we can instantiate subsystems.
  self.run_tracker = RunTracker.global_instance()
  self.reporting = Reporting.global_instance()
  report = self.reporting.initial_reporting(self.run_tracker)
  self.run_tracker.start(report)
  url = self.run_tracker.run_info.get_info('report_url')
  if url:
    self.run_tracker.log(Report.INFO, 'See a report at: {}'.format(url))
  else:
    self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

  self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                           root_dir=self.root_dir,
                                           run_tracker=self.run_tracker)

  # Choose BUILD file access strategy: read from SCM at a fixed rev, or from the filesystem.
  rev = self.options.for_global_scope().build_file_rev
  if rev:
    ScmBuildFile.set_rev(rev)
    ScmBuildFile.set_scm(get_scm())
    build_file_type = ScmBuildFile
  else:
    build_file_type = FilesystemBuildFile
  self.address_mapper = BuildFileAddressMapper(self.build_file_parser, build_file_type)
  self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                address_mapper=self.address_mapper)

  # TODO(John Sirois): Kill when source root registration is lifted out of BUILD files.
  with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
    source_root_bootstrapper = SourceRootBootstrapper.global_instance()
    source_root_bootstrapper.bootstrap(self.address_mapper, self.build_file_parser)

  self._expand_goals_and_specs()

  # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
  self.run_tracker.run_info.add_scm_info()
class BaseTest(unittest2.TestCase):
  """A baseclass useful for tests requiring a temporary buildroot."""

  def build_path(self, relpath):
    """Returns the canonical BUILD file path for the given relative build path."""
    if os.path.basename(relpath).startswith('BUILD'):
      return relpath
    else:
      return os.path.join(relpath, 'BUILD')

  def create_dir(self, relpath):
    """Creates a directory under the buildroot.

    relpath: The relative path to the directory from the build root.

    Returns the absolute path of the created directory.
    """
    path = os.path.join(self.build_root, relpath)
    safe_mkdir(path)
    return path

  def create_file(self, relpath, contents='', mode='w'):
    """Writes to a file under the buildroot.

    relpath:  The relative path to the file from the build root.
    contents: A string containing the contents of the file - '' by default.
    mode:     The mode to write to the file in - over-write by default.

    Returns the absolute path of the written file.
    """
    path = os.path.join(self.build_root, relpath)
    with safe_open(path, mode=mode) as fp:
      fp.write(contents)
    return path

  def add_to_build_file(self, relpath, target):
    """Adds the given target specification to the BUILD file at relpath.

    relpath: The relative path to the BUILD file from the build root.
    target:  A string containing the target definition as it would appear in a BUILD file.
    """
    self.create_file(self.build_path(relpath), target, mode='a')

  def make_target(self, spec='', target_type=Target, dependencies=None, derived_from=None,
                  **kwargs):
    """Directly injects a synthetic target into the build graph, bypassing BUILD file parsing."""
    address = SyntheticAddress.parse(spec)
    target = target_type(name=address.target_name,
                         address=address,
                         build_graph=self.build_graph,
                         **kwargs)
    dependencies = dependencies or []
    self.build_graph.inject_target(target,
                                   dependencies=[dep.address for dep in dependencies],
                                   derived_from=derived_from)
    return target

  @property
  def alias_groups(self):
    # Subclasses override to expose additional aliases to their test BUILD files.
    return BuildFileAliases.create(targets={'dependencies': Dependencies})

  def setUp(self):
    # Point the singleton BuildRoot at a fresh temporary directory for the duration of the test.
    self.real_build_root = BuildRoot().path
    self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
    BuildRoot().path = self.build_root
    self.create_file('pants.ini')
    build_configuration = BuildConfiguration()
    build_configuration.register_aliases(self.alias_groups)
    self.build_file_parser = BuildFileParser(build_configuration, self.build_root)
    self.build_graph = BuildGraph()

  def config(self, overrides=''):
    """Returns a config valid for the test build root."""
    if overrides:
      with temporary_file() as fp:
        fp.write(overrides)
        fp.close()
        with environment_as(PANTS_CONFIG_OVERRIDE=fp.name):
          return Config.load()
    else:
      return Config.load()

  def create_options(self, **kwargs):
    return dict(**kwargs)

  def context(self, config='', options=None, target_roots=None, **kwargs):
    """Creates a task Context wired to this test's build graph and parser."""
    return create_context(config=self.config(overrides=config),
                          options=self.create_options(**(options or {})),
                          target_roots=target_roots,
                          build_graph=self.build_graph,
                          build_file_parser=self.build_file_parser,
                          **kwargs)

  def tearDown(self):
    # Restore global singletons and remove the temporary buildroot.
    BuildRoot().reset()
    SourceRoot.reset()
    safe_rmtree(self.build_root)
    BuildFileCache.clear()

  def target(self, spec):
    """Resolves the given target address to a Target object.

    address: The BUILD target address to resolve.

    Returns the corresponding Target or else None if the address does not point to a defined
    Target.
    """
    if self.build_graph.get_target_from_spec(spec) is None:
      self.build_file_parser.inject_spec_closure_into_build_graph(spec, self.build_graph)
    return self.build_graph.get_target_from_spec(spec)

  def create_files(self, path, files):
    """Writes to a file under the buildroot with contents same as file name.

    path:  The relative path to the file from the build root.
    files: List of file names.
    """
    for f in files:
      self.create_file(os.path.join(path, f), contents=f)

  def create_library(self, path, target_type, name, sources, **kwargs):
    """Creates a library target of given type at the BUILD file at path with sources

    path:        The relative path to the BUILD file from the build root.
    target_type: valid pants target type.
    name:        Name of the library target.
    sources:     List of source file at the path relative to path.
    **kwargs:    Optional attributes that can be set for any library target.
                 Currently it includes support for resources and java_sources.
    """
    self.create_files(path, sources)
    # NB: `'key' in kwargs` replaces the deprecated dict.has_key(), which was removed in
    # Python 3; behavior is identical.
    self.add_to_build_file(path, dedent('''
          %(target_type)s(name='%(name)s',
            sources=%(sources)s,
            %(resources)s
            %(java_sources)s
          )
        ''' % dict(target_type=target_type,
                   name=name,
                   sources=repr(sources or []),
                   resources=('resources=[pants("%s")],' % kwargs.get('resources')
                              if 'resources' in kwargs else ''),
                   java_sources=('java_sources=[%s]'
                                 % ','.join(map(lambda str_target: 'pants("%s")' % str_target,
                                                kwargs.get('java_sources')))
                                 if 'java_sources' in kwargs else ''),
                   )))
    return self.target('%s:%s' % (path, name))

  def create_resources(self, path, name, *sources):
    return self.create_library(path, 'resources', name, sources)

  @contextmanager
  def workspace(self, *buildfiles):
    """Yields a temporary build root pre-populated with empty files at the given relpaths."""
    with temporary_dir() as root_dir:
      with BuildRoot().temporary(root_dir):
        with pushd(root_dir):
          for buildfile in buildfiles:
            touch(os.path.join(root_dir, buildfile))
          yield os.path.realpath(root_dir)
class BaseTest(unittest.TestCase):
  """A baseclass useful for tests requiring a temporary buildroot."""

  def build_path(self, relpath):
    """Returns the canonical BUILD file path for the given relative build path."""
    if os.path.basename(relpath).startswith('BUILD'):
      return relpath
    else:
      return os.path.join(relpath, 'BUILD')

  def create_dir(self, relpath):
    """Creates a directory under the buildroot.

    relpath: The relative path to the directory from the build root.
    """
    safe_mkdir(os.path.join(self.build_root, relpath))

  def create_file(self, relpath, contents='', mode='w'):
    """Writes to a file under the buildroot.

    relpath:  The relative path to the file from the build root.
    contents: A string containing the contents of the file - '' by default.
    mode:     The mode to write to the file in - over-write by default.
    """
    with safe_open(os.path.join(self.build_root, relpath), mode=mode) as fp:
      fp.write(contents)

  def add_to_build_file(self, relpath, target):
    """Adds the given target specification to the BUILD file at relpath.

    relpath: The relative path to the BUILD file from the build root.
    target:  A string containing the target definition as it would appear in a BUILD file.
    """
    self.create_file(self.build_path(relpath), target, mode='a')

  def make_target(self, spec='', target_type=Target, dependencies=None, derived_from=None,
                  **kwargs):
    """Directly injects a synthetic target into the build graph, bypassing BUILD file parsing."""
    address = SyntheticAddress.parse(spec)
    target = target_type(name=address.target_name,
                         address=address,
                         build_graph=self.build_graph,
                         **kwargs)
    dependencies = dependencies or []
    self.build_graph.inject_target(target,
                                   dependencies=[dep.address for dep in dependencies],
                                   derived_from=derived_from)
    return target

  @property
  def alias_groups(self):
    # Subclasses override to expose additional aliases to their test BUILD files.
    return {'target_aliases': {'dependencies': Dependencies}}

  def setUp(self):
    # Point the singleton BuildRoot at a fresh temporary directory for the duration of the test.
    self.real_build_root = BuildRoot().path
    self.build_root = mkdtemp(suffix='_BUILD_ROOT')
    BuildRoot().path = self.build_root
    self.create_file('pants.ini')
    self.build_file_parser = BuildFileParser(self.build_root)
    self.build_file_parser.register_alias_groups(self.alias_groups)
    self.build_graph = BuildGraph()

  def config(self, overrides=''):
    """Returns a config valid for the test build root."""
    if overrides:
      with temporary_file() as fp:
        fp.write(overrides)
        fp.close()
        with environment_as(PANTS_CONFIG_OVERRIDE=fp.name):
          return Config.load()
    else:
      return Config.load()

  def create_options(self, **kwargs):
    return dict(**kwargs)

  def context(self, config='', options=None, target_roots=None, **kwargs):
    """Creates a task Context wired to this test's build graph and parser."""
    return create_context(config=self.config(overrides=config),
                          options=self.create_options(**(options or {})),
                          target_roots=target_roots,
                          build_graph=self.build_graph,
                          build_file_parser=self.build_file_parser,
                          **kwargs)

  def tearDown(self):
    # Restore global singletons and remove the temporary buildroot.
    BuildRoot().reset()
    SourceRoot.reset()
    safe_rmtree(self.build_root)
    BuildFileCache.clear()
    self.build_file_parser.clear_registered_context()

  def target(self, spec):
    """Resolves the given target address to a Target object.

    address: The BUILD target address to resolve.

    Returns the corresponding Target or else None if the address does not point to a defined
    Target.
    """
    if self.build_graph.get_target_from_spec(spec) is None:
      self.build_file_parser.inject_spec_closure_into_build_graph(spec, self.build_graph)
    return self.build_graph.get_target_from_spec(spec)

  def create_files(self, path, files):
    """Writes to a file under the buildroot with contents same as file name.

    path:  The relative path to the file from the build root.
    files: List of file names.
    """
    for f in files:
      self.create_file(os.path.join(path, f), contents=f)

  def create_library(self, path, target_type, name, sources, **kwargs):
    """Creates a library target of given type at the BUILD file at path with sources

    path:        The relative path to the BUILD file from the build root.
    target_type: valid pants target type.
    name:        Name of the library target.
    sources:     List of source file at the path relative to path.
    **kwargs:    Optional attributes that can be set for any library target.
                 Currently it includes support for resources and java_sources.
    """
    self.create_files(path, sources)
    # NB: `'key' in kwargs` replaces the deprecated dict.has_key(), which was removed in
    # Python 3; behavior is identical.
    self.add_to_build_file(path, dedent('''
          %(target_type)s(name='%(name)s',
            sources=%(sources)s,
            %(resources)s
            %(java_sources)s
          )
        ''' % dict(target_type=target_type,
                   name=name,
                   sources=repr(sources or []),
                   resources=('resources=[pants("%s")],' % kwargs.get('resources')
                              if 'resources' in kwargs else ''),
                   java_sources=('java_sources=[%s]'
                                 % ','.join(map(lambda str_target: 'pants("%s")' % str_target,
                                                kwargs.get('java_sources')))
                                 if 'java_sources' in kwargs else ''),
                   )))
    return self.target('%s:%s' % (path, name))

  def create_resources(self, path, name, *sources):
    return self.create_library(path, 'resources', name, sources)

  @contextmanager
  def workspace(self, *buildfiles):
    """Yields a temporary build root pre-populated with empty files at the given relpaths."""
    with temporary_dir() as root_dir:
      with BuildRoot().temporary(root_dir):
        with pushd(root_dir):
          for buildfile in buildfiles:
            touch(os.path.join(root_dir, buildfile))
          yield os.path.realpath(root_dir)
class GoalRunner(object):
  """Lists installed goals or else executes a named goal."""

  def __init__(self, root_dir):
    """
    :param root_dir: The root directory of the pants workspace.
    """
    self.root_dir = root_dir

  def setup(self):
    """Bootstraps options, loads plugins/backends and initializes the build graph machinery."""
    options_bootstrapper = OptionsBootstrapper()

    # Force config into the cache so we (and plugin/backend loading code) can use it.
    # TODO: Plumb options in explicitly.
    bootstrap_options = options_bootstrapper.get_bootstrap_options()
    self.config = Config.from_cache()

    # Add any extra paths to python path (eg for loading extra source backends)
    sys.path.extend(bootstrap_options.for_global_scope().pythonpath)

    # Load plugins and backends.
    backend_packages = self.config.getlist('backends', 'packages', [])
    plugins = self.config.getlist('backends', 'plugins', [])
    build_configuration = load_plugins_and_backends(plugins, backend_packages)

    # Now that plugins and backends are loaded, we can gather the known scopes.
    self.targets = []
    known_scopes = ['']
    for goal in Goal.all():
      # Note that enclosing scopes will appear before scopes they enclose.
      known_scopes.extend(filter(None, goal.known_scopes()))

    # Now that we have the known scopes we can get the full options.
    self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
    self.register_options()

    self.run_tracker = RunTracker.from_config(self.config)
    report = initial_reporting(self.config, self.run_tracker)
    self.run_tracker.start(report)
    url = self.run_tracker.run_info.get_info('report_url')
    if url:
      self.run_tracker.log(Report.INFO, 'See a report at: %s' % url)
    else:
      self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

    self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                             root_dir=self.root_dir,
                                             run_tracker=self.run_tracker)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                  address_mapper=self.address_mapper)

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
      # construct base parameters to be filled in for BuildGraph
      for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
        # TODO(pl): This is an unfortunate interface leak, but I don't think
        # in the long run that we should be relying on "bootstrap" BUILD files
        # that do nothing except modify global state.  That type of behavior
        # (e.g. source roots, goal registration) should instead happen in
        # project plugins, or specialized configuration files.
        self.build_file_parser.parse_build_file_family(build_file)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    self._expand_goals_and_specs()

  def get_spec_excludes(self):
    # Note: Only call after register_options() has been called.
    return [os.path.join(self.root_dir, spec_exclude)
            for spec_exclude in self.options.for_global_scope().spec_excludes]

  @property
  def global_options(self):
    return self.options.for_global_scope()

  def register_options(self):
    """Registers global options and per-goal task options on self.options."""
    # Add a 'bootstrap' attribute to the register function, so that register_global can
    # access the bootstrap option values.
    def register_global(*args, **kwargs):
      return self.options.register_global(*args, **kwargs)
    register_global.bootstrap = self.options.bootstrap_option_values()
    register_global_options(register_global)
    for goal in Goal.all():
      goal.register_options(self.options)

  def _expand_goals_and_specs(self):
    """Parses command-line goals/specs into self.goals and self.targets."""
    logger = logging.getLogger(__name__)

    goals = self.options.goals
    specs = self.options.target_specs
    fail_fast = self.options.for_global_scope().fail_fast

    # Warn when an argument could be either a goal name or a directory with a BUILD file.
    for goal in goals:
      if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
        logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                       "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

    if self.options.is_help:
      self.options.print_help(goals=goals)
      sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
      spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                      spec_excludes=self.get_spec_excludes(),
                                      exclude_target_regexps=self.global_options.exclude_target_regexp)
      with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
        for spec in specs:
          for address in spec_parser.parse_addresses(spec, fail_fast):
            self.build_graph.inject_address_closure(address)
            self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]

  def run(self):
    """Runs the requested goals, always closing the run tracker; returns _do_run's exit code."""
    def fail():
      self.run_tracker.set_root_outcome(WorkUnit.FAILURE)

    kill_nailguns = self.options.for_global_scope().kill_nailguns
    try:
      result = self._do_run()
      if result:
        fail()
    except KeyboardInterrupt:
      fail()
      # On ctrl-c we always kill nailguns, otherwise they might keep running
      # some heavyweight compilation and gum up the system during a subsequent run.
      kill_nailguns = True
      raise
    except Exception:
      fail()
      raise
    finally:
      self.run_tracker.end()
      # Must kill nailguns only after run_tracker.end() is called, otherwise there may still
      # be pending background work that needs a nailgun.
      if kill_nailguns:
        # TODO: This is JVM-specific and really doesn't belong here.
        # TODO: Make this more selective? Only kill nailguns that affect state?
        # E.g., checkstyle may not need to be killed.
        NailgunTask.killall(log.info)
    return result

  def _do_run(self):
    # TODO(John Sirois): Consider moving to straight python logging.  The divide between the
    # context/work-unit logging and standard python logging doesn't buy us anything.

    # TODO(Eric Ayers) We are missing log messages. Set the log level earlier
    # Enable standard python logging for code with no handle to a context/work-unit.
    # NOTE(review): indentation below is reconstructed from a whitespace-mangled paste;
    # the nesting matches the upstream pants goal_runner — confirm against history.
    if self.global_options.level:
      LogOptions.set_stderr_log_level((self.global_options.level or 'info').upper())
    logdir = self.global_options.logdir or self.config.get('goals', 'logdir', default=None)
    if logdir:
      safe_mkdir(logdir)
      LogOptions.set_log_dir(logdir)

      prev_log_level = None
      # If quiet, temporarily change stderr log level to kill init's output.
      if self.global_options.quiet:
        prev_log_level = LogOptions.loglevel_name(LogOptions.stderr_log_level())
        # loglevel_name can fail, so only change level if we were able to get the current one.
        if prev_log_level is not None:
          LogOptions.set_stderr_log_level(LogOptions._LOG_LEVEL_NONE_KEY)
      log.init('goals')
      if prev_log_level is not None:
        LogOptions.set_stderr_log_level(prev_log_level)
    else:
      log.init()

    # Update the reporting settings, now that we have flags etc.
    def is_quiet_task():
      # True if any requested goal installs a task that opts out of console reporting.
      for goal in self.goals:
        if goal.has_task_of_type(QuietTaskMixin):
          return True
      return False

    is_explain = self.global_options.explain
    update_reporting(self.global_options, is_quiet_task() or is_explain, self.run_tracker)

    context = Context(
      config=self.config,
      options=self.options,
      run_tracker=self.run_tracker,
      target_roots=self.targets,
      requested_goals=self.requested_goals,
      build_graph=self.build_graph,
      build_file_parser=self.build_file_parser,
      address_mapper=self.address_mapper,
      spec_excludes=self.get_spec_excludes()
    )

    # Fail fast (exit code 1) on requested goals that have no tasks installed.
    unknown = []
    for goal in self.goals:
      if not goal.ordered_task_names():
        unknown.append(goal)
    if unknown:
      context.log.error('Unknown goal(s): %s\n' % ' '.join(goal.name for goal in unknown))
      return 1

    engine = RoundEngine()
    return engine.execute(context, self.goals)
class Command(object):
  """Baseclass for all pants subcommands."""

  @staticmethod
  def get_command(name):
    # Returns the registered command class for name, or None.
    return Command._commands.get(name, None)

  @staticmethod
  def all_commands():
    return Command._commands.keys()

  # Registry of command name -> command class, populated by _register().
  _commands = {}

  @classmethod
  def _register(cls):
    """Register a command class."""
    command_name = cls.__dict__.get('__command__', None)
    if command_name:
      Command._commands[command_name] = cls

  @staticmethod
  def scan_addresses(root_dir, base_path=None):
    """Parses all targets available in BUILD files under base_path and returns their addresses.

    If no base_path is specified, root_dir is assumed to be the base_path
    """
    addresses = OrderedSet()
    for buildfile in BuildFile.scan_buildfiles(root_dir, base_path):
      addresses.update(Target.get_all_addresses(buildfile))
    return addresses

  @classmethod
  def serialized(cls):
    # Subclasses return True to take the global command lock (see run()).
    return False

  def __init__(self, run_tracker, root_dir, parser, args):
    """run_tracker: The (already opened) RunTracker to track this run with
    root_dir: The root directory of the pants workspace
    parser: an OptionParser
    args: the subcommand arguments to parse"""
    self.run_tracker = run_tracker
    self.root_dir = root_dir

    # TODO(pl): Gross that we're doing a local import here, but this has dependendencies
    # way down into specific Target subclasses, and I'd prefer to make it explicit that this
    # import is in many ways similar to to third party plugin imports below.
    from pants.base.build_file_aliases import (target_aliases, object_aliases,
                                               applicative_path_relative_util_aliases,
                                               partial_path_relative_util_aliases)
    # NB: these register on the BuildFileParser class itself (global state shared by instances).
    for alias, target_type in target_aliases.items():
      BuildFileParser.register_target_alias(alias, target_type)

    for alias, obj in object_aliases.items():
      BuildFileParser.register_exposed_object(alias, obj)

    for alias, util in applicative_path_relative_util_aliases.items():
      BuildFileParser.register_applicative_path_relative_util(alias, util)

    for alias, util in partial_path_relative_util_aliases.items():
      BuildFileParser.register_partial_path_relative_util(alias, util)

    config = Config.load()

    # TODO(pl): This is awful but I need something quick and dirty to support
    # injection of third party Targets and tools into BUILD file context
    plugins = config.getlist('plugins', 'entry_points', default=[])
    for entry_point_spec in plugins:
      # Each spec is 'module.path:entry_point_name'; the entry point is called with the config.
      module, entry_point = entry_point_spec.split(':')
      plugin_module = __import__(module, globals(), locals(), [entry_point], 0)
      getattr(plugin_module, entry_point)(config)

    self.build_file_parser = BuildFileParser(root_dir=self.root_dir, run_tracker=self.run_tracker)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker)

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
      # construct base parameters to be filled in for BuildGraph
      for path in config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        # try:
        build_file = BuildFile(root_dir=self.root_dir, relpath=path)
        self.build_file_parser.parse_build_file_family(build_file)
        # except (TypeError, ImportError):
        #   error(path, include_traceback=True)
        # except (IOError, SyntaxError):
        #   error(path)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    # Override the OptionParser's error with more useful output
    def error(message=None, show_help=True):
      if message:
        print(message + '\n')
      if show_help:
        parser.print_help()
      parser.exit(status=1)
    # NB: this closure also shadows the no-op Command.error instance method defined below.
    parser.error = error
    self.error = error

    self.setup_parser(parser, args)
    self.options, self.args = parser.parse_args(args)
    self.parser = parser

  def setup_parser(self, parser, args):
    """Subclasses should override and confiure the OptionParser to reflect
    the subcommand option and argument requirements.  Upon successful
    construction, subcommands will be able to access self.options and
    self.args."""
    pass

  def error(self, message=None, show_help=True):
    """Reports the error message, optionally followed by pants help, and then exits."""
    # NOTE(review): intentionally a no-op here; __init__ replaces self.error with a closure
    # bound to the parser.

  def run(self, lock):
    """Subcommands that are serialized() should override if they need the ability to interact
    with the global command lock.
    The value returned should be an int, 0 indicating success and any other value indicating
    failure."""
    return self.execute()

  def execute(self):
    """Subcommands that do not require serialization should override to perform the command
    action.  The value returned should be an int, 0 indicating success and any other value
    indicating failure."""
    raise NotImplementedError('Either run(lock) or execute() must be over-ridden.')

  def cleanup(self):
    """Called on SIGINT (e.g., when the user hits ctrl-c).
    Subcommands may override to perform cleanup before exit."""
    pass
class Command(object):
  """Baseclass for all pants subcommands."""

  @staticmethod
  def get_command(name):
    # Looks up a previously _register()ed command class by name; None when unknown.
    return Command._commands.get(name, None)

  @staticmethod
  def all_commands():
    return Command._commands.keys()

  # name -> command class registry, filled in by _register().
  _commands = {}

  @classmethod
  def _register(cls):
    """Register a command class."""
    command_name = cls.__dict__.get('__command__', None)
    if command_name:
      Command._commands[command_name] = cls

  @staticmethod
  def scan_addresses(root_dir, base_path=None):
    """Parses all targets available in BUILD files under base_path and returns their addresses.

    If no base_path is specified, root_dir is assumed to be the base_path
    """
    addresses = OrderedSet()
    for buildfile in BuildFile.scan_buildfiles(root_dir, base_path):
      addresses.update(Target.get_all_addresses(buildfile))
    return addresses

  @classmethod
  def serialized(cls):
    # Override to return True when the subcommand needs the global command lock.
    return False

  def __init__(self, run_tracker, root_dir, parser, args):
    """run_tracker: The (already opened) RunTracker to track this run with
    root_dir: The root directory of the pants workspace
    parser: an OptionParser
    args: the subcommand arguments to parse"""
    self.run_tracker = run_tracker
    self.root_dir = root_dir

    # TODO(pl): Gross that we're doing a local import here, but this has dependendencies
    # way down into specific Target subclasses, and I'd prefer to make it explicit that this
    # import is in many ways similar to to third party plugin imports below.
    from pants.base.build_file_aliases import (target_aliases, object_aliases,
                                               applicative_path_relative_util_aliases,
                                               partial_path_relative_util_aliases)
    # Registration happens on the BuildFileParser class itself — global, process-wide state.
    for alias, target_type in target_aliases.items():
      BuildFileParser.register_target_alias(alias, target_type)

    for alias, obj in object_aliases.items():
      BuildFileParser.register_exposed_object(alias, obj)

    for alias, util in applicative_path_relative_util_aliases.items():
      BuildFileParser.register_applicative_path_relative_util(alias, util)

    for alias, util in partial_path_relative_util_aliases.items():
      BuildFileParser.register_partial_path_relative_util(alias, util)

    config = Config.load()

    # TODO(pl): This is awful but I need something quick and dirty to support
    # injection of third party Targets and tools into BUILD file context
    plugins = config.getlist('plugins', 'entry_points', default=[])
    for entry_point_spec in plugins:
      # 'module.path:entry_point_name' — import the module and call the entry point with config.
      module, entry_point = entry_point_spec.split(':')
      plugin_module = __import__(module, globals(), locals(), [entry_point], 0)
      getattr(plugin_module, entry_point)(config)

    self.build_file_parser = BuildFileParser(root_dir=self.root_dir, run_tracker=self.run_tracker)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker)

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
      # construct base parameters to be filled in for BuildGraph
      for path in config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        # try:
        build_file = BuildFile(root_dir=self.root_dir, relpath=path)
        self.build_file_parser.parse_build_file_family(build_file)
        # except (TypeError, ImportError):
        #   error(path, include_traceback=True)
        # except (IOError, SyntaxError):
        #   error(path)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    # Override the OptionParser's error with more useful output
    def error(message=None, show_help=True):
      if message:
        print(message + '\n')
      if show_help:
        parser.print_help()
      parser.exit(status=1)
    # Also shadows the no-op Command.error instance method below.
    parser.error = error
    self.error = error

    self.setup_parser(parser, args)
    self.options, self.args = parser.parse_args(args)
    self.parser = parser

  def setup_parser(self, parser, args):
    """Subclasses should override and confiure the OptionParser to reflect
    the subcommand option and argument requirements.  Upon successful
    construction, subcommands will be able to access self.options and
    self.args."""
    pass

  def error(self, message=None, show_help=True):
    """Reports the error message, optionally followed by pants help, and then exits."""
    # NOTE(review): deliberately empty — __init__ rebinds self.error to a parser-bound closure.

  def run(self, lock):
    """Subcommands that are serialized() should override if they need the ability to interact
    with the global command lock.
    The value returned should be an int, 0 indicating success and any other value indicating
    failure."""
    return self.execute()

  def execute(self):
    """Subcommands that do not require serialization should override to perform the command
    action.  The value returned should be an int, 0 indicating success and any other value
    indicating failure."""
    raise NotImplementedError('Either run(lock) or execute() must be over-ridden.')

  def cleanup(self):
    """Called on SIGINT (e.g., when the user hits ctrl-c).
    Subcommands may override to perform cleanup before exit."""
    pass
def __init__(self, run_tracker, root_dir, parser, args):
  """run_tracker: The (already opened) RunTracker to track this run with
  root_dir: The root directory of the pants workspace
  parser: an OptionParser
  args: the subcommand arguments to parse"""
  self.run_tracker = run_tracker
  self.root_dir = root_dir

  # TODO(pl): Gross that we're doing a local import here, but this has dependendencies
  # way down into specific Target subclasses, and I'd prefer to make it explicit that this
  # import is in many ways similar to to third party plugin imports below.
  from pants.base.build_file_aliases import (target_aliases, object_aliases,
                                             applicative_path_relative_util_aliases,
                                             partial_path_relative_util_aliases)
  # NB: registration mutates class-level (process-global) state on BuildFileParser.
  for alias, target_type in target_aliases.items():
    BuildFileParser.register_target_alias(alias, target_type)

  for alias, obj in object_aliases.items():
    BuildFileParser.register_exposed_object(alias, obj)

  for alias, util in applicative_path_relative_util_aliases.items():
    BuildFileParser.register_applicative_path_relative_util(alias, util)

  for alias, util in partial_path_relative_util_aliases.items():
    BuildFileParser.register_partial_path_relative_util(alias, util)

  config = Config.load()

  # TODO(pl): This is awful but I need something quick and dirty to support
  # injection of third party Targets and tools into BUILD file context
  plugins = config.getlist('plugins', 'entry_points', default=[])
  for entry_point_spec in plugins:
    # Spec format is 'module.path:entry_point_name'; the entry point receives the config.
    module, entry_point = entry_point_spec.split(':')
    plugin_module = __import__(module, globals(), locals(), [entry_point], 0)
    getattr(plugin_module, entry_point)(config)

  self.build_file_parser = BuildFileParser(root_dir=self.root_dir, run_tracker=self.run_tracker)
  self.build_graph = BuildGraph(run_tracker=self.run_tracker)

  with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
    # construct base parameters to be filled in for BuildGraph
    for path in config.getlist('goals', 'bootstrap_buildfiles', default=[]):
      # try:
      build_file = BuildFile(root_dir=self.root_dir, relpath=path)
      self.build_file_parser.parse_build_file_family(build_file)
      # except (TypeError, ImportError):
      #   error(path, include_traceback=True)
      # except (IOError, SyntaxError):
      #   error(path)

  # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
  self.run_tracker.run_info.add_scm_info()

  # Override the OptionParser's error with more useful output
  def error(message=None, show_help=True):
    if message:
      print(message + '\n')
    if show_help:
      parser.print_help()
    parser.exit(status=1)
  parser.error = error
  self.error = error

  self.setup_parser(parser, args)
  self.options, self.args = parser.parse_args(args)
  self.parser = parser
def setup(self):
  """Bootstrap the pants run: options, logging, backends, reporting, and the build graph.

  The ordering here matters: config is cached before backends load, logging is
  configured before backends can log, and the full options can only be parsed
  once all goals/tasks have contributed their known scopes.
  """
  options_bootstrapper = OptionsBootstrapper()

  # Force config into the cache so we (and plugin/backend loading code) can use it.
  # TODO: Plumb options in explicitly.
  bootstrap_options = options_bootstrapper.get_bootstrap_options()
  self.config = Config.from_cache()

  # Get logging setup prior to loading backends so that they can log as needed.
  self._setup_logging(bootstrap_options.for_global_scope())

  # Add any extra paths to python path (eg for loading extra source backends)
  for path in bootstrap_options.for_global_scope().pythonpath:
    sys.path.append(path)
    pkg_resources.fixup_namespace_packages(path)

  # Load plugins and backends.
  backend_packages = self.config.getlist('backends', 'packages', [])
  plugins = self.config.getlist('backends', 'plugins', [])
  build_configuration = load_plugins_and_backends(plugins, backend_packages)

  # Now that plugins and backends are loaded, we can gather the known scopes.
  self.targets = []
  known_scopes = ['']

  # Add scopes for global subsystem instances.
  for subsystem_type in set(self.subsystems) | Goal.global_subsystem_types():
    known_scopes.append(subsystem_type.qualify_scope(Options.GLOBAL_SCOPE))

  # Add scopes for all tasks in all goals.
  for goal in Goal.all():
    # Note that enclosing scopes will appear before scopes they enclose.
    known_scopes.extend(filter(None, goal.known_scopes()))

  # Now that we have the known scopes we can get the full options.
  self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
  self.register_options()

  # Make the options values available to all subsystems.
  Subsystem._options = self.options

  # Now that we have options we can instantiate subsystems.
  self.run_tracker = RunTracker.global_instance()
  report = initial_reporting(self.config, self.run_tracker)
  self.run_tracker.start(report)
  url = self.run_tracker.run_info.get_info('report_url')
  if url:
    self.run_tracker.log(Report.INFO, 'See a report at: {}'.format(url))
  else:
    self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

  self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                           root_dir=self.root_dir,
                                           run_tracker=self.run_tracker)

  # When --build-file-rev is set, BUILD files are read from that SCM revision rather
  # than from the working copy.
  rev = self.options.for_global_scope().build_file_rev
  if rev:
    ScmBuildFile.set_rev(rev)
    ScmBuildFile.set_scm(get_scm())
    build_file_type = ScmBuildFile
  else:
    build_file_type = FilesystemBuildFile

  self.address_mapper = BuildFileAddressMapper(self.build_file_parser, build_file_type)
  self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                address_mapper=self.address_mapper)

  with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
    # construct base parameters to be filled in for BuildGraph
    for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
      build_file = self.address_mapper.from_cache(root_dir=self.root_dir, relpath=path)
      # TODO(pl): This is an unfortunate interface leak, but I don't think
      # in the long run that we should be relying on "bootstrap" BUILD files
      # that do nothing except modify global state. That type of behavior
      # (e.g. source roots, goal registration) should instead happen in
      # project plugins, or specialized configuration files.
      self.build_file_parser.parse_build_file_family(build_file)

  self._expand_goals_and_specs()

  # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
  self.run_tracker.run_info.add_scm_info()
class GoalRunner(object):
  """Lists installed goals or else executes a named goal."""

  def __init__(self, root_dir):
    """
    :param root_dir: The root directory of the pants workspace.
    """
    self.root_dir = root_dir

  def setup(self):
    """Bootstrap the run: options, logging, backends, reporting, and the build graph."""
    options_bootstrapper = OptionsBootstrapper()

    # Force config into the cache so we (and plugin/backend loading code) can use it.
    # TODO: Plumb options in explicitly.
    bootstrap_options = options_bootstrapper.get_bootstrap_options()
    self.config = Config.from_cache()

    # Get logging setup prior to loading backends so that they can log as needed.
    self._setup_logging(bootstrap_options.for_global_scope())

    # Add any extra paths to python path (eg for loading extra source backends)
    for path in bootstrap_options.for_global_scope().pythonpath:
      sys.path.append(path)
      pkg_resources.fixup_namespace_packages(path)

    # Load plugins and backends.
    backend_packages = self.config.getlist('backends', 'packages', [])
    plugins = self.config.getlist('backends', 'plugins', [])
    build_configuration = load_plugins_and_backends(plugins, backend_packages)

    # Now that plugins and backends are loaded, we can gather the known scopes.
    self.targets = []
    # TODO: Create a 'Subsystem' abstraction instead of special-casing run-tracker here
    # and in register_options().
    known_scopes = ['', 'run-tracker']
    for goal in Goal.all():
      # Note that enclosing scopes will appear before scopes they enclose.
      known_scopes.extend(filter(None, goal.known_scopes()))

    # Now that we have the known scopes we can get the full options.
    self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
    self.register_options()

    self.run_tracker = RunTracker.from_options(self.options)
    report = initial_reporting(self.config, self.run_tracker)
    self.run_tracker.start(report)
    url = self.run_tracker.run_info.get_info('report_url')
    if url:
      self.run_tracker.log(Report.INFO, 'See a report at: %s' % url)
    else:
      self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

    self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                             root_dir=self.root_dir,
                                             run_tracker=self.run_tracker)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                  address_mapper=self.address_mapper)

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
      # construct base parameters to be filled in for BuildGraph
      for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
        # TODO(pl): This is an unfortunate interface leak, but I don't think
        # in the long run that we should be relying on "bootstrap" BUILD files
        # that do nothing except modify global state. That type of behavior
        # (e.g. source roots, goal registration) should instead happen in
        # project plugins, or specialized configuration files.
        self.build_file_parser.parse_build_file_family(build_file)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    self._expand_goals_and_specs()

  @property
  def spec_excludes(self):
    # Note: Only call after register_options() has been called.
    return self.options.for_global_scope().spec_excludes

  @property
  def global_options(self):
    # The global-scope view of the parsed options.
    return self.options.for_global_scope()

  def register_options(self):
    """Register global, run-tracker and per-goal options on self.options."""
    # Add a 'bootstrap' attribute to the register function, so that register_global can
    # access the bootstrap option values.
    def register_global(*args, **kwargs):
      return self.options.register_global(*args, **kwargs)
    register_global.bootstrap = self.options.bootstrap_option_values()
    register_global_options(register_global)

    # This is the first case we have of non-task, non-global options.
    # The current implementation special-cases RunTracker, and is temporary.
    # In the near future it will be replaced with a 'Subsystem' abstraction.
    # But for now this is useful for kicking the tires.
    def register_run_tracker(*args, **kwargs):
      self.options.register('run-tracker', *args, **kwargs)
    RunTracker.register_options(register_run_tracker)

    for goal in Goal.all():
      goal.register_options(self.options)

  def _expand_goals_and_specs(self):
    """Resolve command-line goals and target specs into self.goals and self.targets."""
    goals = self.options.goals
    specs = self.options.target_specs
    fail_fast = self.options.for_global_scope().fail_fast

    for goal in goals:
      # An argument that is also a BUILD file path is ambiguous; warn that it was
      # interpreted as a goal name.
      if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
        logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                       "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

    if self.options.print_help_if_requested():
      sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
      spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                      spec_excludes=self.spec_excludes,
                                      exclude_target_regexps=self.global_options.exclude_target_regexp)
      with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
        for spec in specs:
          for address in spec_parser.parse_addresses(spec, fail_fast):
            self.build_graph.inject_address_closure(address)
            self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]

  def run(self):
    """Execute the requested goals, always closing the run tracker; returns the result."""
    def fail():
      self.run_tracker.set_root_outcome(WorkUnit.FAILURE)

    kill_nailguns = self.options.for_global_scope().kill_nailguns
    try:
      result = self._do_run()
      if result:
        fail()
    except KeyboardInterrupt:
      fail()
      # On ctrl-c we always kill nailguns, otherwise they might keep running
      # some heavyweight compilation and gum up the system during a subsequent run.
      kill_nailguns = True
      raise
    except Exception:
      fail()
      raise
    finally:
      self.run_tracker.end()
      # Must kill nailguns only after run_tracker.end() is called, otherwise there may still
      # be pending background work that needs a nailgun.
      if kill_nailguns:
        # TODO: This is JVM-specific and really doesn't belong here.
        # TODO: Make this more selective? Only kill nailguns that affect state?
        # E.g., checkstyle may not need to be killed.
        NailgunTask.killall()
    return result

  def _do_run(self):
    """Run the round engine over self.goals; returns 1 if any goal is unknown."""
    # Update the reporting settings, now that we have flags etc.
    def is_quiet_task():
      for goal in self.goals:
        if goal.has_task_of_type(QuietTaskMixin):
          return True
      return False

    is_explain = self.global_options.explain
    update_reporting(self.global_options, is_quiet_task() or is_explain, self.run_tracker)

    context = Context(
      config=self.config,
      options=self.options,
      run_tracker=self.run_tracker,
      target_roots=self.targets,
      requested_goals=self.requested_goals,
      build_graph=self.build_graph,
      build_file_parser=self.build_file_parser,
      address_mapper=self.address_mapper,
      spec_excludes=self.spec_excludes
    )

    unknown = []
    for goal in self.goals:
      if not goal.ordered_task_names():
        unknown.append(goal)
    if unknown:
      context.log.error('Unknown goal(s): %s\n' % ' '.join(goal.name for goal in unknown))
      return 1

    engine = RoundEngine()
    return engine.execute(context, self.goals)

  def _setup_logging(self, global_options):
    # NB: quiet help says 'Squelches all console output apart from errors'.
    level = 'ERROR' if global_options.quiet else global_options.level.upper()
    setup_logging(level, log_dir=global_options.logdir)
def __init__(self, run_tracker, root_dir, parser, args):
  """Bootstrap BUILD-file state for this command and parse its options.

  :param run_tracker: The (already opened) RunTracker to track this run with.
  :param root_dir: The root directory of the pants workspace.
  :param parser: An OptionParser.
  :param args: The subcommand arguments to parse.
  """
  self.run_tracker = run_tracker
  self.root_dir = root_dir

  # TODO(pl): Gross that we're doing a local import here, but this has dependencies
  # way down into specific Target subclasses, and I'd prefer to make it explicit that this
  # import is in many ways similar to the third party plugin imports below.
  from pants.base.build_file_aliases import (target_aliases, object_aliases,
                                             applicative_path_relative_util_aliases,
                                             partial_path_relative_util_aliases)
  for alias, target_type in target_aliases.items():
    BuildFileParser.register_target_alias(alias, target_type)

  for alias, obj in object_aliases.items():
    BuildFileParser.register_exposed_object(alias, obj)

  for alias, util in applicative_path_relative_util_aliases.items():
    BuildFileParser.register_applicative_path_relative_util(alias, util)

  for alias, util in partial_path_relative_util_aliases.items():
    BuildFileParser.register_partial_path_relative_util(alias, util)

  config = Config.load()

  # TODO(pl): This is awful but I need something quick and dirty to support
  # injection of third party Targets and tools into BUILD file context
  plugins = config.getlist('plugins', 'entry_points', default=[])
  for entry_point_spec in plugins:
    # Each spec is 'module.path:callable'; the callable is invoked with the config
    # so the plugin can perform its own registrations.
    module, entry_point = entry_point_spec.split(':')
    plugin_module = __import__(module, globals(), locals(), [entry_point], 0)
    getattr(plugin_module, entry_point)(config)

  self.build_file_parser = BuildFileParser(root_dir=self.root_dir, run_tracker=self.run_tracker)
  self.build_graph = BuildGraph(run_tracker=self.run_tracker)

  with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
    # Parse any bootstrap BUILD files up front so their side effects are in place
    # before the subcommand's own option parsing below.
    for path in config.getlist('goals', 'bootstrap_buildfiles', default=[]):
      build_file = BuildFile(root_dir=self.root_dir, relpath=path)
      self.build_file_parser.parse_build_file_family(build_file)

  # Now that we've parsed the bootstrap BUILD files, we know about the SCM system.
  self.run_tracker.run_info.add_scm_info()

  # Override the OptionParser's error with more useful output.
  def error(message=None, show_help=True):
    if message:
      print(message + '\n')
    if show_help:
      parser.print_help()
    parser.exit(status=1)

  parser.error = error
  self.error = error

  self.setup_parser(parser, args)
  self.options, self.args = parser.parse_args(args)
  self.parser = parser
class GoalRunner(object):
  """Lists installed goals or else executes a named goal."""

  def __init__(self, root_dir):
    """
    :param root_dir: The root directory of the pants workspace.
    """
    self.root_dir = root_dir

  def setup(self):
    """Bootstrap the run: options, backends, reporting, and the build graph."""
    options_bootstrapper = OptionsBootstrapper()

    # Force config into the cache so we (and plugin/backend loading code) can use it.
    # TODO: Plumb options in explicitly.
    options_bootstrapper.get_bootstrap_options()
    self.config = Config.from_cache()

    # Load plugins and backends.
    backend_packages = self.config.getlist('backends', 'packages', [])
    plugins = self.config.getlist('backends', 'plugins', [])
    build_configuration = load_plugins_and_backends(plugins, backend_packages)

    # Now that plugins and backends are loaded, we can gather the known scopes.
    self.targets = []
    known_scopes = ['']
    for goal in Goal.all():
      # Note that enclosing scopes will appear before scopes they enclose.
      known_scopes.extend(filter(None, goal.known_scopes()))

    # Now that we have the known scopes we can get the full options.
    self.new_options = options_bootstrapper.get_full_options(known_scopes=known_scopes)
    self.register_options()

    self.run_tracker = RunTracker.from_config(self.config)
    report = initial_reporting(self.config, self.run_tracker)
    self.run_tracker.start(report)
    url = self.run_tracker.run_info.get_info('report_url')
    if url:
      self.run_tracker.log(Report.INFO, 'See a report at: %s' % url)
    else:
      self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants goal server)')

    self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                             root_dir=self.root_dir,
                                             run_tracker=self.run_tracker)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser)
    self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                  address_mapper=self.address_mapper)

    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
      # construct base parameters to be filled in for BuildGraph
      for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
        # TODO(pl): This is an unfortunate interface leak, but I don't think
        # in the long run that we should be relying on "bootstrap" BUILD files
        # that do nothing except modify global state. That type of behavior
        # (e.g. source roots, goal registration) should instead happen in
        # project plugins, or specialized configuration files.
        self.build_file_parser.parse_build_file_family(build_file)

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()

    self._expand_goals_and_specs()

  def get_spec_excludes(self):
    # Note: Only call after register_options() has been called.
    # Excludes are configured relative to the workspace root.
    return [os.path.join(self.root_dir, spec_exclude)
            for spec_exclude in self.new_options.for_global_scope().spec_excludes]

  @property
  def global_options(self):
    # The global-scope view of the parsed options.
    return self.new_options.for_global_scope()

  def register_options(self):
    """Register global and per-goal options on self.new_options."""
    # Add a 'bootstrap' attribute to the register function, so that register_global can
    # access the bootstrap option values.
    def register_global(*args, **kwargs):
      return self.new_options.register_global(*args, **kwargs)
    register_global.bootstrap = self.new_options.bootstrap_option_values()
    register_global_options(register_global)
    for goal in Goal.all():
      goal.register_options(self.new_options)

  def _expand_goals_and_specs(self):
    """Resolve command-line goals and target specs into self.goals and self.targets."""
    logger = logging.getLogger(__name__)
    goals = self.new_options.goals
    specs = self.new_options.target_specs
    fail_fast = self.new_options.for_global_scope().fail_fast

    for goal in goals:
      # An argument that is also a BUILD file path is ambiguous; warn that it was
      # interpreted as a goal name.
      if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
        logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                       "a goal. If this is incorrect, disambiguate it with ./{0}."
                       .format(goal))

    if self.new_options.is_help:
      self.new_options.print_help(goals=goals)
      sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
      spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                      spec_excludes=self.get_spec_excludes())
      with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
        for spec in specs:
          for address in spec_parser.parse_addresses(spec, fail_fast):
            self.build_graph.inject_address_closure(address)
            self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]

  def run(self):
    """Execute the requested goals, always closing the run tracker; returns the result."""
    def fail():
      self.run_tracker.set_root_outcome(WorkUnit.FAILURE)

    kill_nailguns = self.new_options.for_global_scope().kill_nailguns
    try:
      result = self._do_run()
      if result:
        fail()
    except KeyboardInterrupt:
      fail()
      # On ctrl-c we always kill nailguns, otherwise they might keep running
      # some heavyweight compilation and gum up the system during a subsequent run.
      kill_nailguns = True
      raise
    except Exception:
      fail()
      raise
    finally:
      self.run_tracker.end()
      # Must kill nailguns only after run_tracker.end() is called, otherwise there may still
      # be pending background work that needs a nailgun.
      if kill_nailguns:
        # TODO: This is JVM-specific and really doesn't belong here.
        # TODO: Make this more selective? Only kill nailguns that affect state?
        # E.g., checkstyle may not need to be killed.
        NailgunTask.killall(log.info)
    return result

  def _do_run(self):
    """Configure logging/reporting, apply target excludes, and run the round engine."""
    # TODO(John Sirois): Consider moving to straight python logging. The divide between the
    # context/work-unit logging and standard python logging doesn't buy us anything.
    # Enable standard python logging for code with no handle to a context/work-unit.
    if self.global_options.level:
      LogOptions.set_stderr_log_level((self.global_options.level or 'info').upper())

    logdir = self.global_options.logdir or self.config.get('goals', 'logdir', default=None)
    if logdir:
      safe_mkdir(logdir)
      LogOptions.set_log_dir(logdir)

      prev_log_level = None
      # If quiet, temporarily change stderr log level to kill init's output.
      if self.global_options.quiet:
        prev_log_level = LogOptions.loglevel_name(LogOptions.stderr_log_level())
        # loglevel_name can fail, so only change level if we were able to get the current one.
        if prev_log_level is not None:
          LogOptions.set_stderr_log_level(LogOptions._LOG_LEVEL_NONE_KEY)
      log.init('goals')
      if prev_log_level is not None:
        LogOptions.set_stderr_log_level(prev_log_level)
    else:
      log.init()

    # Update the reporting settings, now that we have flags etc.
    def is_quiet_task():
      for goal in self.goals:
        if goal.has_task_of_type(QuietTaskMixin):
          return True
      return False

    # Target specs are mapped to the patterns which match them, if any. This variable is a key for
    # specs which don't match any exclusion regexes. We know it won't already be in the list of
    # patterns, because the asterisks in its name make it an invalid regex.
    _UNMATCHED_KEY = '** unmatched **'

    def targets_by_pattern(targets, patterns):
      # Group targets by the first exclusion pattern matching their address spec;
      # non-matching targets are collected under _UNMATCHED_KEY.
      mapping = defaultdict(list)
      for target in targets:
        matched_pattern = None
        for pattern in patterns:
          if re.search(pattern, target.address.spec) is not None:
            matched_pattern = pattern
            break
        if matched_pattern is None:
          mapping[_UNMATCHED_KEY].append(target)
        else:
          mapping[matched_pattern].append(target)
      return mapping

    is_explain = self.global_options.explain
    update_reporting(self.global_options, is_quiet_task() or is_explain, self.run_tracker)

    if self.global_options.exclude_target_regexp:
      excludes = self.global_options.exclude_target_regexp
      log.debug('excludes:\n {excludes}'.format(excludes='\n '.join(excludes)))
      by_pattern = targets_by_pattern(self.targets, excludes)
      self.targets = by_pattern[_UNMATCHED_KEY]
      # The rest of this if-statement is just for debug logging.
      log.debug('Targets after excludes: {targets}'.format(
          targets=', '.join(t.address.spec for t in self.targets)))
      excluded_count = sum(len(by_pattern[p]) for p in excludes)
      log.debug('Excluded {count} target{plural}.'.format(
          count=excluded_count, plural=('s' if excluded_count != 1 else '')))
      for pattern in excludes:
        log.debug('Targets excluded by pattern {pattern}\n {targets}'.format(
            pattern=pattern,
            targets='\n '.join(t.address.spec for t in by_pattern[pattern])))

    context = Context(config=self.config,
                      new_options=self.new_options,
                      run_tracker=self.run_tracker,
                      target_roots=self.targets,
                      requested_goals=self.requested_goals,
                      build_graph=self.build_graph,
                      build_file_parser=self.build_file_parser,
                      address_mapper=self.address_mapper,
                      spec_excludes=self.get_spec_excludes())

    unknown = []
    for goal in self.goals:
      if not goal.ordered_task_names():
        unknown.append(goal)
    if unknown:
      context.log.error('Unknown goal(s): %s\n' % ' '.join(goal.name for goal in unknown))
      return 1

    engine = RoundEngine()
    return engine.execute(context, self.goals)