def __init__(self, context):
    """Set up the scalastyle task: load its config file and optional excludes.

    :raises Config.ConfigError: if the configured scalastyle config or excludes
      file does not exist on disk.
    """
    NailgunTask.__init__(self, context)

    self._scalastyle_config = self.context.config.get_required(
        Scalastyle._CONFIG_SECTION, 'config')
    if not os.path.exists(self._scalastyle_config):
        raise Config.ConfigError(
            'Scalastyle config file does not exist: %s' % self._scalastyle_config)

    self._excludes = set()
    excludes_file = self.context.config.get(Scalastyle._CONFIG_SECTION, 'excludes')
    if excludes_file:
        if not os.path.exists(excludes_file):
            raise Config.ConfigError(
                'Scalastyle excludes file does not exist: %s' % excludes_file)
        self.context.log.debug('Using scalastyle excludes file %s' % excludes_file)
        # One regex pattern per line; surrounding whitespace is stripped before compiling.
        with open(excludes_file) as fh:
            self._excludes.update(re.compile(line.strip()) for line in fh.readlines())

    self._scalastyle_bootstrap_key = 'scalastyle'
    self.register_jvm_tool(self._scalastyle_bootstrap_key, [':scalastyle'])
def create_config(sample_ini='', defaults=None):
    """Creates a ``Config`` from the ``sample_ini`` file contents.

    :param string sample_ini: The contents of the ini file containing the config values.
    :param dict defaults: An optional dict of global default ini values to seed.
    """
    if not isinstance(sample_ini, Compatibility.string):
        raise ValueError(
            'The sample_ini supplied must be a string, given: %s' % sample_ini)

    parser = Config.create_parser(defaults)
    # Feed the in-memory ini text to the parser as a file-like object.
    with io.BytesIO(sample_ini) as ini:
        parser.readfp(ini)
    return Config(parser)
def setUp(self):
    """Write a sample ini file to disk and load it as ``self.config``.

    The ini literal is written as real multi-line ini text: ConfigParser requires
    one `key: value` entry per line, so the content must contain newlines.
    """
    with temporary_file() as ini:
        ini.write(
'''
[DEFAULT]
answer: 42
scale: 1.2
path: /a/b/%(answer)s
embed: %(path)s::foo
disclaimer: Let it be known that.

[a]
fast: True
list: [1, 2, 3, %(answer)s]

[b]
preempt: False
dict: {
    'a': 1,
    'b': %(answer)s,
    'c': ['%(answer)s', %(answer)s]
  }
''')
        # Close so Config.load can re-open the file by path.
        ini.close()
        self.config = Config.load(configpath=ini.name)
def setup_parser(self, parser, args):
    """Register the `build` command's options on the given option parser."""
    parser.set_usage("\n"
                     "  %prog build (options) [spec] (build args)\n"
                     "  %prog build (options) [spec]... -- (build args)")
    parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                      default=Config.load().getdefault('connection_timeout'),
                      help="Number of seconds to wait for http connections.")
    parser.add_option('-i', '--interpreter', dest='interpreter', default=None,
                      help='The interpreter requirement for this chroot.')
    parser.add_option('-v', '--verbose', dest='verbose', default=False,
                      action='store_true',
                      help='Show verbose output.')
    # Anything after the spec(s) is passed through to the build, not parsed here.
    parser.disable_interspersed_args()
    parser.epilog = ('Builds the specified Python target(s). Use ./pants goal for JVM and other '
                     'targets.')
def __init__(self, target, root_dir, extra_targets=None, builder=None, platforms=None,
             interpreter=None, conn_timeout=None):
    """Prepare a Python chroot build for `target` rooted at `root_dir`."""
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._extra_targets = [] if extra_targets is None else list(extra_targets)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(
        PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts'),
        str(self._interpreter.identity))

    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def __init__(self, target, root_dir, run_tracker, interpreter=None, conn_timeout=None):
    """Build a PEX distribution chroot for a PythonBinary target.

    :raises PythonBinaryBuilder.NotABinaryTargetException: if `target` is not a PythonBinary.
    """
    self.target = target
    self.interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(target, PythonBinary):
        raise PythonBinaryBuilder.NotABinaryTargetException(
            "Target %s is not a PythonBinary!" % target)

    config = Config.load()
    self.distdir = config.getdefault('pants_distdir')
    # NOTE(review): tempfile.mktemp is race-prone/deprecated; presumably PEXBuilder
    # requires a not-yet-existing path here — confirm before switching to mkdtemp.
    distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)

    run_info = run_tracker.run_info
    build_properties = {}
    build_properties.update(run_info.add_basic_info(run_id=None, timestamp=time.time()))
    build_properties.update(run_info.add_scm_info())

    pexinfo = target.pexinfo.copy()
    pexinfo.build_properties = build_properties
    builder = PEXBuilder(distpath, pex_info=pexinfo, interpreter=self.interpreter)

    self.chroot = PythonChroot(
        target,
        root_dir,
        builder=builder,
        interpreter=self.interpreter,
        conn_timeout=conn_timeout)
def __init__(self, target, root_dir, run_tracker, interpreter=None, conn_timeout=None):
    """Construct the chroot that will produce a PEX for a PythonBinary target.

    :raises PythonBinaryBuilder.NotABinaryTargetException: for non-binary targets.
    """
    self.target = target
    self.interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(target, PythonBinary):
        raise PythonBinaryBuilder.NotABinaryTargetException(
            "Target %s is not a PythonBinary!" % target)

    config = Config.load()
    self.distdir = config.getdefault('pants_distdir')
    # NOTE(review): mktemp only reserves a name (race-prone); verify PEXBuilder's
    # expectations before replacing it.
    distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)

    # Record basic run and SCM info as PEX build properties.
    run_info = run_tracker.run_info
    build_properties = {}
    build_properties.update(run_info.add_basic_info(run_id=None, timestamp=time.time()))
    build_properties.update(run_info.add_scm_info())

    pexinfo = target.pexinfo.copy()
    pexinfo.build_properties = build_properties

    pex_builder = PEXBuilder(distpath, pex_info=pexinfo, interpreter=self.interpreter)
    self.chroot = PythonChroot(target, root_dir,
                               builder=pex_builder,
                               interpreter=self.interpreter,
                               conn_timeout=conn_timeout)
def __init__(self): """Creates an ivy bootstrapper.""" self._config = Config.load() self._bootstrap_jar_url = self._config.get('ivy', 'bootstrap_jar_url', default=self._DEFAULT_URL) self._timeout = Amount(self._config.getint('ivy', 'bootstrap_fetch_timeout_secs', default=1), Time.SECONDS) self._version_or_ivyxml = self._config.get('ivy', 'ivy_profile', default=self._DEFAULT_VERSION) self._classpath = None
def generate_coverage_config(target):
    """Return a coverage ConfigParser whose [html] section targets a per-target dist dir."""
    cp = configparser.ConfigParser()
    cp.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
    cp.add_section('html')
    # Place html output under <pants_distdir>/coverage/<build dir>/<target name>.
    target_dir = os.path.join(
        Config.load().getdefault('pants_distdir'),
        'coverage',
        os.path.dirname(target.address.buildfile.relpath),
        target.name)
    safe_mkdir(target_dir)
    cp.set('html', 'directory', target_dir)
    return cp
def setUp(self):
    """Write a python-setup ini fixture and load it as ``self.config``.

    The ini literal is written as genuine multi-line ini text: ConfigParser needs
    each section header and option on its own line.
    """
    with temporary_file() as ini:
        ini.write('''
[python-setup]
platforms: [
  'current',
  'linux-x86_64']
''')
        # Close before loading so Config.load can re-open the path.
        ini.close()
        self.config = Config.load(configpath=ini.name)
def generate_coverage_config(target):
    """Build the coverage configuration for `target`, with html output per target."""
    parser = configparser.ConfigParser()
    parser.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
    parser.add_section('html')

    distdir = Config.load().getdefault('pants_distdir')
    relpath = os.path.dirname(target.address.buildfile.relpath)
    target_dir = os.path.join(distdir, 'coverage', relpath, target.name)
    safe_mkdir(target_dir)

    parser.set('html', 'directory', target_dir)
    return parser
def setUp(self):
    """Create an on-disk python-setup ini fixture and load it into ``self.config``.

    Writes the literal with real newlines — ConfigParser cannot parse a section
    and its options collapsed onto one line.
    """
    with temporary_file() as ini:
        ini.write(
'''
[python-setup]
platforms: [
  'current',
  'linux-x86_64']
''')
        # Must be closed so the loader can re-open it by name.
        ini.close()
        self.config = Config.load(configpath=ini.name)
def create_config(sample_ini='', defaults=None):
    """Creates a ``Config`` from the ``sample_ini`` file contents.

    :param string sample_ini: The contents of the ini file containing the config values.
    :param dict defaults: An optional dict of global default ini values to seed.
    """
    if not isinstance(sample_ini, Compatibility.string):
        raise ValueError('The sample_ini supplied must be a string, given: %s' % sample_ini)

    parser = Config.create_parser(defaults)
    ini = io.BytesIO(sample_ini)
    try:
        parser.readfp(ini)
    finally:
        ini.close()
    return Config(parser)
def setup_parser(self, parser, args):
    """Wire up options for the `build` command."""
    usage_lines = ("\n"
                   "  %prog build (options) [spec] (build args)\n"
                   "  %prog build (options) [spec]... -- (build args)")
    parser.set_usage(usage_lines)

    parser.add_option("-t", "--timeout",
                      dest="conn_timeout",
                      type="int",
                      default=Config.load().getdefault('connection_timeout'),
                      help="Number of seconds to wait for http connections.")
    parser.add_option('-i', '--interpreter',
                      dest='interpreter',
                      default=None,
                      help='The interpreter requirement for this chroot.')
    parser.add_option('-v', '--verbose',
                      dest='verbose',
                      default=False,
                      action='store_true',
                      help='Show verbose output.')

    # Trailing args belong to the build being invoked, not to this parser.
    parser.disable_interspersed_args()
    parser.epilog = ('Builds the specified Python target(s). Use ./pants goal for JVM and other '
                     'targets.')
def __init__(self, run_tracker, root_dir, parser, argv):
    """Select an interpreter, split specs from build args, and resolve targets.

    Fixes:
    - The two bare ``except:`` clauses around spec/target parsing also caught
      SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    - The build-args slice ``self.args[specs_end + 1:len(self.args) + 1]`` is
      simplified to an open-ended slice (identical result, including the empty case).
    """
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    if not self.args:
        self.error("A spec argument is required")

    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(
            [self.options.interpreter] if self.options.interpreter else [''])))
    if len(interpreters) != 1:
        self.error('Unable to detect suitable interpreter.')
    else:
        self.debug('Selected %s' % interpreters[0])
    self.interpreter = interpreters[0]

    try:
        # Everything after an explicit '--' separator is passed through as build args.
        specs_end = self.args.index('--')
        self.build_args = self.args[specs_end + 1:]
    except ValueError:
        # No separator: the first arg is the spec, the rest are build args.
        specs_end = 1
        self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[0:specs_end]:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:  # Narrowed from bare except: don't swallow KeyboardInterrupt.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

        try:
            target = Target.get(address)
        except Exception:  # Narrowed from bare except: don't swallow KeyboardInterrupt.
            self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

        if not target:
            self.error("Target %s does not exist" % address)
        self.targets.update(tgt for tgt in target.resolve() if tgt.is_concrete)
def __init__(self, target, root_dir, extra_targets=None, builder=None, interpreter=None,
             conn_timeout=None):
    """Set up a chroot build for `target` rooted at `root_dir`."""
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._interpreter = interpreter or PythonInterpreter.get()
    self._extra_targets = [] if extra_targets is None else list(extra_targets)
    self._resolver = MultiResolver(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(
        self._config.get('python-setup', 'artifact_cache'),
        str(self._interpreter.identity))

    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def __init__(self, run_tracker, root_dir, parser, argv):
    """Validate that a single, artifact-providing target spec was supplied."""
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    if not self.args:
        self.error("A spec argument is required")

    self._config = Config.load()
    self._root = root_dir

    address = Address.parse(root_dir, self.args[0])
    self.target = Target.get(address)
    if self.target is None:
        self.error('%s is not a valid target!' % self.args[0])
    if not self.target.provides:
        self.error('Target must provide an artifact.')
def setup_parser(self, parser, args):
    """Register the `py` command's options."""
    parser.set_usage('\n'
                     '  %prog py (options) [spec] args\n')
    # Args after the spec go to the target environment, not this parser.
    parser.disable_interspersed_args()
    parser.add_option('-t', '--timeout', dest='conn_timeout', type='int',
                      default=Config.load().getdefault('connection_timeout'),
                      help='Number of seconds to wait for http connections.')
    parser.add_option('--pex', dest='pex', default=False, action='store_true',
                      help='Dump a .pex of this chroot instead of attempting to execute it.')
    parser.add_option('--ipython', dest='ipython', default=False, action='store_true',
                      help='Run the target environment in an IPython interpreter.')
    parser.add_option('-r', '--req', dest='extra_requirements', default=[], action='append',
                      help='Additional Python requirements to add to this chroot.')
    parser.add_option('-i', '--interpreter', dest='interpreter', default=None,
                      help='The interpreter requirement for this chroot.')
    parser.add_option('-e', '--entry_point', dest='entry_point', default=None,
                      help='The entry point for the generated PEX.')
    parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                      help='Show verbose output.')
    parser.epilog = """Interact with the chroot of the specified target."""
def iter_generated_sources(cls, target, root, config=None):
    """Generate code for `target` and yield its output files.

    Yields (path relative to the builder's package root, absolute path) pairs.

    :param target: The target to generate sources for.
    :param root: The build root the builder works under.
    :param config: Optional Config; loaded from the default location when omitted.
    :raises TypeError: if no registered generator handles the target's type.

    Fix: the os.walk loop used to rebind the loop variable ``root``, shadowing the
    ``root`` parameter; the walk variable is renamed to ``dirpath``.
    """
    config = config or Config.load()
    # This is sort of facepalmy -- python.new will make this much better.
    for target_type, target_builder in cls.GENERATED_TARGETS.items():
        if isinstance(target, target_type):
            builder_cls = target_builder
            break
    else:
        raise TypeError(
            'write_generated_sources could not find suitable code generator for %s' % type(target))

    builder = builder_cls(target, root, config)
    builder.generate()
    for dirpath, _, files in os.walk(builder.package_root):
        for fn in files:
            target_file = os.path.join(dirpath, fn)
            yield os.path.relpath(target_file, builder.package_root), target_file
def __init__(self, target, root_dir, extra_targets=None, builder=None, platforms=None,
             interpreter=None, conn_timeout=None):
    """Initialize a chroot build for `target` under `root_dir`."""
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    scratch = PythonSetup(self._config).scratch_dir("artifact_cache", default_name="artifacts")
    self._egg_cache_root = os.path.join(scratch, str(self._interpreter.identity))

    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def __init__(self, run_tracker, root_dir, parser, argv):
    """Pick an interpreter, split specs from build args, and resolve the targets.

    Fix: the bare ``except:`` clauses (which also trapped SystemExit and
    KeyboardInterrupt) are narrowed to ``except Exception``.
    """
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    if not self.args:
        self.error("A spec argument is required")

    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(
            [self.options.interpreter] if self.options.interpreter else [''])))
    if len(interpreters) != 1:
        self.error('Unable to detect suitable interpreter.')
    else:
        self.debug('Selected %s' % interpreters[0])
    self.interpreter = interpreters[0]

    try:
        specs_end = self.args.index('--')
        # The open-ended slice yields [] when '--' is the last arg, matching the
        # original explicit length check.
        self.build_args = self.args[specs_end + 1:]
    except ValueError:
        specs_end = 1
        self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[0:specs_end]:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:  # Narrowed from a bare except.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

        try:
            target = Target.get(address)
        except Exception:  # Narrowed from a bare except.
            self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

        if not target:
            self.error("Target %s does not exist" % address)
        self.targets.update(tgt for tgt in target.resolve() if tgt.is_concrete)
def setup_parser(self, parser, args):
    """Declare the options accepted by the `py` command."""
    parser.set_usage('\n'
                     '  %prog py (options) [spec] args\n')
    parser.disable_interspersed_args()

    option_specs = [
        (('-t', '--timeout'),
         dict(dest='conn_timeout', type='int',
              default=Config.load().getdefault('connection_timeout'),
              help='Number of seconds to wait for http connections.')),
        (('--pex',),
         dict(dest='pex', default=False, action='store_true',
              help='Dump a .pex of this chroot instead of attempting to execute it.')),
        (('--ipython',),
         dict(dest='ipython', default=False, action='store_true',
              help='Run the target environment in an IPython interpreter.')),
        (('-r', '--req'),
         dict(dest='extra_requirements', default=[], action='append',
              help='Additional Python requirements to add to this chroot.')),
        (('-i', '--interpreter'),
         dict(dest='interpreter', default=None,
              help='The interpreter requirement for this chroot.')),
        (('-e', '--entry_point'),
         dict(dest='entry_point', default=None,
              help='The entry point for the generated PEX.')),
        (('-v', '--verbose'),
         dict(dest='verbose', default=False, action='store_true',
              help='Show verbose output.')),
    ]
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)

    parser.epilog = """Interact with the chroot of the specified target."""
def apply_defaults(self, commands, args):
    """Augments the given list of arguments with any default options found for the given
    commands.

    The returned arguments will be a new copy of the given args with possibly extra
    augmented arguments.

    Default options are applied from the following keys under a section with the name of
    the subcommand the default options apply to:

    'options': These options are either prepended or appended to the command line args as
      specified in the constructor with default_prepend.
    'prepend-options': These options are prepended to the command line args.
    'append-options': These options are appended to the command line args.
    """
    args = args[:]
    if RcFile._DISABLE_PANTS_RC_OPTION in args:
        return args

    config = Config.create_parser()
    read_from = config.read(self.paths)
    if not read_from:
        log.debug('no rcfile found')
        return args
    log.debug('using rcfiles: %s to modify args' % ','.join(read_from))

    def get_rcopts(command, key):
        if not config.has_option(command, key):
            return []
        return config.get(command, key).split()

    commands = list(commands)
    if self.process_default:
        commands.insert(0, Config.DEFAULT_SECTION)

    for cmd in commands:
        opts = get_rcopts(cmd, 'options')
        args = (opts + args) if self.default_prepend else (args + opts)
        args = get_rcopts(cmd, 'prepend-options') + args + get_rcopts(cmd, 'append-options')
    return args
def apply_defaults(self, commands, args):
    """Augment arguments with defaults found for the given commands.

    The returned arguments will be a new copy of the given args with possibly extra
    augmented arguments.

    Default options are applied from the following keys under a section with the name
    of the sub-command the default options apply to:

    * `options` - These options are either prepended or appended to the command line
      args as specified in the constructor with default_prepend.
    * `prepend-options` - These options are prepended to the command line args.
    * `append-options` - These options are appended to the command line args.
    """
    args = args[:]
    if RcFile._DISABLE_PANTS_RC_OPTION in args:
        return args

    config = Config.create_parser()
    read_from = config.read(self.paths)
    if not read_from:
        log.debug('no rcfile found')
        return args
    log.debug('using rcfiles: %s to modify args' % ','.join(read_from))

    def get_rcopts(command, key):
        return config.get(command, key).split() if config.has_option(command, key) else []

    sections = list(commands)
    if self.process_default:
        sections.insert(0, Config.DEFAULT_SECTION)

    for cmd in sections:
        opts = get_rcopts(cmd, 'options')
        if self.default_prepend:
            args = opts + args
        else:
            args = args + opts
        args = get_rcopts(cmd, 'prepend-options') + args + get_rcopts(cmd, 'append-options')
    return args
def _run(): """ To add additional paths to sys.path, add a block to the config similar to the following: [main] roots: ['src/python/twitter/pants_internal/test/',] """ version = get_version() if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION: _do_exit(version) root_dir = get_buildroot() if not os.path.exists(root_dir): _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir) if len(sys.argv) < 2 or (len(sys.argv) == 2 and sys.argv[1] in _HELP_ALIASES): _help(version, root_dir) command_class, command_args = _parse_command(root_dir, sys.argv[1:]) parser = optparse.OptionParser(version=version) RcFile.install_disable_rc_option(parser) parser.add_option(_LOG_EXIT_OPTION, action='store_true', default=False, dest='log_exit', help = 'Log an exit message on success or failure.') config = Config.load() # TODO: This can be replaced once extensions are enabled with # https://github.com/pantsbuild/pants/issues/5 roots = config.getlist('parse', 'roots', default=[]) sys.path.extend(map(lambda root: os.path.join(root_dir, root), roots)) # XXX(wickman) This should be in the command goal, not un pants_exe.py! 
run_tracker = RunTracker.from_config(config) report = initial_reporting(config, run_tracker) run_tracker.start(report) url = run_tracker.run_info.get_info('report_url') if url: run_tracker.log(Report.INFO, 'See a report at: %s' % url) else: run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)') command = command_class(run_tracker, root_dir, parser, command_args) try: if command.serialized(): def onwait(pid): print('Waiting on pants process %s to complete' % _process_info(pid), file=sys.stderr) return True runfile = os.path.join(root_dir, '.pants.run') lock = Lock.acquire(runfile, onwait=onwait) else: lock = Lock.unlocked() try: result = command.run(lock) _do_exit(result) except KeyboardInterrupt: command.cleanup() raise finally: lock.release() finally: run_tracker.end() # Must kill nailguns only after run_tracker.end() is called, because there may still # be pending background work that needs a nailgun. if (hasattr(command.options, 'cleanup_nailguns') and command.options.cleanup_nailguns) \ or config.get('nailgun', 'autokill', default=False): NailgunTask.killall(None)
def __init__(self, run_tracker, root_dir, parser, argv):
    """Resolve command line args into one binary target plus its extra targets.

    Args that fail to parse as targets mark the start of the pass-through build args.

    Fix: ``not_a_target`` interpolated the name ``e``, which is only bound on the
    exception path — the 'Unrecognized target' path raised a NameError. It now uses
    its ``debug_msg`` parameter.
    """
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    self.target = None
    self.extra_targets = []
    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(
            [self.options.interpreter] if self.options.interpreter else [''])))
    if len(interpreters) != 1:
        self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]

    for req in self.options.extra_requirements:
        with ParseContext.temp():
            self.extra_targets.append(PythonRequirement(req, use_2to3=True))

    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    binaries = []
    for k in range(len(self.args)):
        arg = self.args.pop(0)
        if arg == '--':
            break

        def not_a_target(debug_msg):
            self.debug('Not a target, assuming option: %s.' % debug_msg)
            # We failed to parse the arg as a target or else it was in valid address
            # format but did not correspond to a real target. Assume this is the 1st of
            # the build args and terminate processing args for target addresses.
            self.args.insert(0, arg)

        target = None
        try:
            address = Address.parse(root_dir, arg)
            target = Target.get(address)
            if target is None:
                not_a_target(debug_msg='Unrecognized target')
                break
        except Exception as e:
            not_a_target(debug_msg=e)
            break

        for resolved in filter(lambda t: t.is_concrete, target.resolve()):
            if isinstance(resolved, PythonBinary):
                binaries.append(resolved)
            else:
                self.extra_targets.append(resolved)

    if len(binaries) == 0:
        # treat as a chroot
        pass
    elif len(binaries) == 1:
        # We found a binary and are done, the rest of the args get passed to it
        self.target = binaries[0]
    else:
        self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
            arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries)))

    if self.target is None:
        if not self.extra_targets:
            self.error('No valid target specified!')
        self.target = self.extra_targets.pop(0)
def setUpClass(cls):
    """Load the workspace pants config once for every test in this class."""
    cls.config = Config.load()
def setup_parser(self, parser, args):
    """Parse goal invocations: handle help, bootstrap BUILD files, resolve target specs,
    and apply pantsrc default flags before delegating option setup to the phases."""
    self.config = Config.load()
    Goal.add_global_options(parser)

    # We support attempting zero or more goals. Multiple goals must be delimited from
    # further options and non goal args with a '--'. The key permutations we need to
    # support:
    #   ./pants goal => goals
    #   ./pants goal goals => goals
    #   ./pants goal compile src/java/... => compile
    #   ./pants goal compile -x src/java/... => compile
    #   ./pants goal compile src/java/... -x => compile
    #   ./pants goal compile run -- src/java/... => compile, run
    #   ./pants goal compile run -- src/java/... -x => compile, run
    #   ./pants goal compile run -- -x src/java/... => compile, run
    if not args:
        args.append('goals')

    if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
        def format_usage(usages):
            left_colwidth = 0
            for left, right in usages:
                left_colwidth = max(left_colwidth, len(left))
            lines = []
            for left, right in usages:
                lines.append(' %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
            return '\n'.join(lines)

        usages = [
            ("%prog goal goals ([spec]...)", Phase('goals').description),
            ("%prog goal help [goal] ([spec]...)", Phase('help').description),
            ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
            ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
        ]
        parser.set_usage("\n%s" % format_usage(usages))
        parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                         "attempts to achieve the specified goal for the listed targets."
                         """
Note that target specs accept two special forms:
  [dir]:  to include all targets in the specified directory
  [dir]:: to include all targets found in all BUILD files recursively under the directory""")
        parser.print_help()
        sys.exit(0)
    else:
        goals, specs = Goal.parse_args(args)
        self.requested_goals = goals

        with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
            # Bootstrap goals by loading any configured bootstrap BUILD files
            with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
                with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
                    for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
                        try:
                            buildfile = BuildFile(get_buildroot(),
                                                  os.path.relpath(path, get_buildroot()))
                            ParseContext(buildfile).parse()
                        except (TypeError, ImportError, TaskError, GoalError):
                            error(path, include_traceback=True)
                        except (IOError, SyntaxError):
                            error(path)

            # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
            self.run_tracker.run_info.add_scm_info()

            # Bootstrap user goals by loading any BUILD files implied by targets.
            spec_parser = SpecParser(self.root_dir)
            with self.check_errors('The following targets could not be loaded:') as error:
                with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
                    for spec in specs:
                        try:
                            for target, address in spec_parser.parse(spec):
                                if target:
                                    self.targets.append(target)
                                    # Force early BUILD file loading if this target is an
                                    # alias that expands to others.
                                    unused = list(target.resolve())
                                else:
                                    siblings = Target.get_all_addresses(address.buildfile)
                                    prompt = ('did you mean' if len(siblings) == 1
                                              else 'maybe you meant one of these')
                                    error('%s => %s?:\n %s' % (
                                        address, prompt,
                                        '\n '.join(str(a) for a in siblings)))
                        except (TypeError, ImportError, TaskError, GoalError):
                            error(spec, include_traceback=True)
                        except (IOError, SyntaxError, TargetDefinitionException):
                            error(spec)

        self.phases = [Phase(goal) for goal in goals]

        rcfiles = self.config.getdefault('rcfiles', type=list,
                                         default=['/etc/pantsrc', '~/.pants.rc'])
        if rcfiles:
            rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

            # Break down the goals specified on the command line to the full set that will
            # be run so we can apply default flags to inner goal nodes. Also break down
            # goals by Task subclass and register the task class hierarchy fully qualified
            # names so we can apply defaults to baseclasses.
            sections = OrderedSet()
            for phase in Engine.execution_order(self.phases):
                for goal in phase.goals():
                    sections.add(goal.name)
                    for clazz in goal.task_type.mro():
                        if clazz == Task:
                            break
                        sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

            augmented_args = rcfile.apply_defaults(sections, args)
            if augmented_args != args:
                del args[:]
                args.extend(augmented_args)
                sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" %
                                 ' '.join(augmented_args))

        Phase.setup_parser(parser, args, self.phases)
def __init__(self, run_tracker, root_dir, parser, argv):
    """Parse args into a single PythonBinary target and a list of extra targets.

    Any arg that does not resolve to a target is treated as the first build arg.

    Fix: the ``not_a_target`` helper formatted ``e`` (bound only inside the
    ``except`` clause), so the "Unrecognized target" path crashed with NameError;
    it now formats the ``debug_msg`` it is given.
    """
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    self.target = None
    self.extra_targets = []
    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(
            [self.options.interpreter] if self.options.interpreter else [''])))
    if len(interpreters) != 1:
        self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]

    for req in self.options.extra_requirements:
        with ParseContext.temp():
            self.extra_targets.append(PythonRequirement(req, use_2to3=True))

    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    binaries = []
    for k in range(len(self.args)):
        arg = self.args.pop(0)
        if arg == '--':
            break

        def not_a_target(debug_msg):
            self.debug('Not a target, assuming option: %s.' % debug_msg)
            # We failed to parse the arg as a target or else it was in valid address
            # format but did not correspond to a real target. Assume this is the 1st
            # of the build args and terminate processing args for target addresses.
            self.args.insert(0, arg)

        target = None
        try:
            address = Address.parse(root_dir, arg)
            target = Target.get(address)
            if target is None:
                not_a_target(debug_msg='Unrecognized target')
                break
        except Exception as e:
            not_a_target(debug_msg=e)
            break

        for resolved in filter(lambda t: t.is_concrete, target.resolve()):
            if isinstance(resolved, PythonBinary):
                binaries.append(resolved)
            else:
                self.extra_targets.append(resolved)

    if len(binaries) == 0:
        # treat as a chroot
        pass
    elif len(binaries) == 1:
        # We found a binary and are done, the rest of the args get passed to it
        self.target = binaries[0]
    else:
        self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
            arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries)
        ))

    if self.target is None:
        if not self.extra_targets:
            self.error('No valid target specified!')
        self.target = self.extra_targets.pop(0)
def _run(): """ To add additional paths to sys.path, add a block to the config similar to the following: [main] roots: ['src/python/twitter/pants_internal/test/',] """ version = get_version() if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION: _do_exit(version) root_dir = get_buildroot() if not os.path.exists(root_dir): _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir) if len(sys.argv) < 2 or (len(sys.argv) == 2 and sys.argv[1] in _HELP_ALIASES): _help(version, root_dir) command_class, command_args = _parse_command(root_dir, sys.argv[1:]) parser = optparse.OptionParser(version=version) RcFile.install_disable_rc_option(parser) parser.add_option(_LOG_EXIT_OPTION, action='store_true', default=False, dest='log_exit', help='Log an exit message on success or failure.') config = Config.load() # TODO: This can be replaced once extensions are enabled with # https://github.com/pantsbuild/pants/issues/5 roots = config.getlist('parse', 'roots', default=[]) sys.path.extend(map(lambda root: os.path.join(root_dir, root), roots)) # XXX(wickman) This should be in the command goal, not un pants_exe.py! 
run_tracker = RunTracker.from_config(config) report = initial_reporting(config, run_tracker) run_tracker.start(report) url = run_tracker.run_info.get_info('report_url') if url: run_tracker.log(Report.INFO, 'See a report at: %s' % url) else: run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)') command = command_class(run_tracker, root_dir, parser, command_args) try: if command.serialized(): def onwait(pid): print('Waiting on pants process %s to complete' % _process_info(pid), file=sys.stderr) return True runfile = os.path.join(root_dir, '.pants.run') lock = Lock.acquire(runfile, onwait=onwait) else: lock = Lock.unlocked() try: result = command.run(lock) _do_exit(result) except KeyboardInterrupt: command.cleanup() raise finally: lock.release() finally: run_tracker.end() # Must kill nailguns only after run_tracker.end() is called, because there may still # be pending background work that needs a nailgun. if (hasattr(command.options, 'cleanup_nailguns') and command.options.cleanup_nailguns) \ or config.get('nailgun', 'autokill', default=False): NailgunTask.killall(None)
class Goal(Command):
  """Lists installed goals or else executes a named goal."""

  __command__ = 'goal'

  # Options shared by every goal invocation.  NOTE: Config.load() runs at class-definition
  # time here, so importing this module reads the pants config.
  GLOBAL_OPTIONS = [
    Option("-t", "--timeout", dest="conn_timeout", type='int',
           default=Config.load().getdefault('connection_timeout'),
           help="Number of seconds to wait for http connections."),
    Option("-x", "--time", action="store_true", dest="time", default=False,
           help="Times goal phases and outputs a report."),
    Option("-e", "--explain", action="store_true", dest="explain", default=False,
           help="Explain the execution of goals."),
    Option("-k", "--kill-nailguns", action="store_true", dest="cleanup_nailguns", default=False,
           help="Kill nailguns before exiting"),
    Option("-d", "--logdir", dest="logdir",
           help="[%default] Forks logs to files under this directory."),
    Option("-l", "--level", dest="log_level", type="choice", choices=['debug', 'info', 'warn'],
           help="[info] Sets the logging level to one of 'debug', 'info' or 'warn'."
                "if set."),
    Option("-q", "--quiet", action="store_true", dest="quiet", default=False,
           help="Squelches all console output apart from errors."),
    Option("--no-colors", dest="no_color", action="store_true", default=turn_off_colored_logging,
           help="Do not colorize log messages."),
    Option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
           help="Print the commands that would be run, without actually running them."),
    Option("--read-from-artifact-cache", "--no-read-from-artifact-cache", action="callback",
           callback=_set_bool, dest="read_from_artifact_cache", default=True,
           help="Whether to read artifacts from cache instead of building them, if configured to do so."),
    Option("--write-to-artifact-cache", "--no-write-to-artifact-cache", action="callback",
           callback=_set_bool, dest="write_to_artifact_cache", default=True,
           help="Whether to write artifacts to cache if configured to do so."),

    # NONE OF THE ARTIFACT CACHE FLAGS BELOW DO ANYTHING ANY MORE.
    # TODO: Remove them once all uses of them are killed.
    Option("--verify-artifact-cache", "--no-verify-artifact-cache", action="callback",
           callback=_set_bool, dest="verify_artifact_cache", default=False,
           help="Whether to verify that cached artifacts are identical after rebuilding them."),
    Option("--local-artifact-cache-readonly", "--no-local-artifact-cache-readonly", action="callback",
           callback=_set_bool, dest="local_artifact_cache_readonly", default=False,
           help="If set, we don't write to local artifact caches, even when writes are enabled."),
    # Note that remote writes are disabled by default, so you have control over who's populating
    # the shared cache.
    Option("--remote-artifact-cache-readonly", "--no-remote-artifact-cache-readonly", action="callback",
           callback=_set_bool, dest="remote_artifact_cache_readonly", default=True,
           help="If set, we don't write to remote artifact caches, even when writes are enabled."),

    Option("--all", dest="target_directory", action="append",
           help="DEPRECATED: Use [dir]: with no flag in a normal target position on the command "
                "line. (Adds all targets found in the given directory's BUILD file. Can be "
                "specified more than once.)"),
    Option("--all-recursive", dest="recursive_directory", action="append",
           help="DEPRECATED: Use [dir]:: with no flag in a normal target position on the command "
                "line. (Adds all targets found recursively under the given directory. Can be "
                "specified more than once to add more than one root target directory to scan.)"),
  ]

  output = None

  @staticmethod
  def add_global_options(parser):
    """Registers every GLOBAL_OPTIONS entry on the given optparse parser."""
    for option in Goal.GLOBAL_OPTIONS:
      parser.add_option(option)

  @staticmethod
  def parse_args(args):
    """Splits command-line args into (goals, specs) OrderedSets.

    A bare word is a goal; anything containing os.sep or ':' is a target spec.
    A literal '--' delimits multiple goals from the specs that follow it, and is
    removed from args in place.  Raises GoalError if specs precede the '--'.
    """
    goals = OrderedSet()
    specs = OrderedSet()
    help = False  # tracks whether the 'help' pseudo-goal was requested
    explicit_multi = False

    def is_spec(spec):
      return os.sep in spec or ':' in spec

    for i, arg in enumerate(args):
      help = help or 'help' == arg
      if not arg.startswith('-'):
        specs.add(arg) if is_spec(arg) else goals.add(arg)
      elif '--' == arg:
        if specs:
          raise GoalError('Cannot intermix targets with goals when using --. Targets should '
                          'appear on the right')
        explicit_multi = True
        # Mutating args is intentional: callers see the '--' stripped.
        del args[i]
        break

    if explicit_multi:
      # When 'help' was requested it occupies a slot before the specs begin.
      spec_offset = len(goals) + 1 if help else len(goals)
      specs.update(arg for arg in args[spec_offset:] if not arg.startswith('-'))

    return goals, specs

  @classmethod
  def execute(cls, context, *names):
    """Programmatically executes the named phases against the given context's targets."""
    parser = OptionParser()
    cls.add_global_options(parser)
    phases = [Phase(name) for name in names]
    Phase.setup_parser(parser, [], phases)
    # Parse an empty arg list so every registered option gets its default value.
    options, _ = parser.parse_args([])
    context = Context(context.config, options, context.run_tracker, context.target_roots,
                      requested_goals=list(names))
    return cls._execute(context, phases, print_timing=False)

  @staticmethod
  def _execute(context, phases, print_timing):
    """Runs the phases through a GroupEngine and returns its exit result."""
    engine = GroupEngine(print_timing=print_timing)
    return engine.execute(context, phases)

  # TODO(John Sirois): revisit wholesale locking when we move py support into pants new
  @classmethod
  def serialized(cls):
    # Goal serialization is now handled in goal execution during group processing.
    # The goal command doesn't need to hold the serialization lock; individual goals will
    # acquire the lock if they need to be serialized.
    return False

  def __init__(self, run_tracker, root_dir, parser, args):
    # Targets are accumulated during setup_parser (invoked by Command.__init__).
    self.targets = []
    Command.__init__(self, run_tracker, root_dir, parser, args)

  @contextmanager
  def check_errors(self, banner):
    """Context manager that collects keyed errors and reports them all at once on exit.

    Yields an error(key, include_traceback=False) callable that snapshots the current
    exception (if any) against the key.  If any errors were recorded when the block
    exits, aborts via self.error() with a combined message under the given banner.
    """
    errors = {}

    def error(key, include_traceback=False):
      exc_type, exc_value, _ = sys.exc_info()
      msg = StringIO()
      if include_traceback:
        # Use the frame one below the top of the trace: the site that raised,
        # not this error() helper itself.
        frame = inspect.trace()[-2]
        filename = frame[1]
        lineno = frame[2]
        funcname = frame[3]
        code = ''.join(frame[4]) if frame[4] else None
        traceback.print_list([(filename, lineno, funcname, code)], file=msg)
      if exc_type:
        msg.write(''.join(traceback.format_exception_only(exc_type, exc_value)))
      errors[key] = msg.getvalue()
      sys.exc_clear()  # Python 2 only: clear the handled exception.

    yield error

    if errors:
      msg = StringIO()
      msg.write(banner)
      # Keys recorded with no exception text are listed bare first.
      invalid_keys = [key for key, exc in errors.items() if not exc]
      if invalid_keys:
        msg.write('\n  %s' % '\n  '.join(invalid_keys))
      for key, exc in errors.items():
        if exc:
          msg.write('\n  %s =>\n    %s' % (key, '\n      '.join(exc.splitlines())))
      # The help message for goal is extremely verbose, and will obscure the
      # actual error message, so we don't show it in this case.
      self.error(msg.getvalue(), show_help=False)

  def setup_parser(self, parser, args):
    """Parses goals/specs from args, bootstraps BUILD files, and wires up the option parser.

    Mutates args in place when pantsrc defaults are applied, and populates
    self.config, self.requested_goals, self.targets and self.phases.
    """
    self.config = Config.load()
    Goal.add_global_options(parser)

    # We support attempting zero or more goals.  Multiple goals must be delimited from further
    # options and non goal args with a '--'.  The key permutations we need to support:
    # ./pants goal => goals
    # ./pants goal goals => goals
    # ./pants goal compile src/java/... => compile
    # ./pants goal compile -x src/java/... => compile
    # ./pants goal compile src/java/... -x => compile
    # ./pants goal compile run -- src/java/... => compile, run
    # ./pants goal compile run -- src/java/... -x => compile, run
    # ./pants goal compile run -- -x src/java/... => compile, run
    if not args:
      args.append('goals')

    if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
      def format_usage(usages):
        # Align the right-hand descriptions one column past the longest usage string.
        left_colwidth = 0
        for left, right in usages:
          left_colwidth = max(left_colwidth, len(left))
        lines = []
        for left, right in usages:
          lines.append('  %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
        return '\n'.join(lines)

      usages = [
        ("%prog goal goals ([spec]...)", Phase('goals').description),
        ("%prog goal help [goal] ([spec]...)", Phase('help').description),
        ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
        ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
      ]
      parser.set_usage("\n%s" % format_usage(usages))
      parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                       "attempts to achieve the specified goal for the listed targets." """
                       Note that target specs accept two special forms:
                         [dir]:  to include all targets in the specified directory
                         [dir]:: to include all targets found in all BUILD files recursively under
                                 the directory""")

      parser.print_help()
      sys.exit(0)
    else:
      goals, specs = Goal.parse_args(args)
      self.requested_goals = goals

      with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
        # Bootstrap goals by loading any configured bootstrap BUILD files
        with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
          with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
            for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
              try:
                buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
                ParseContext(buildfile).parse()
              except (TypeError, ImportError, TaskError, GoalError):
                error(path, include_traceback=True)
              except (IOError, SyntaxError):
                error(path)
        # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
        self.run_tracker.run_info.add_scm_info()

        # Bootstrap user goals by loading any BUILD files implied by targets.
        spec_parser = SpecParser(self.root_dir)
        with self.check_errors('The following targets could not be loaded:') as error:
          with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
            for spec in specs:
              try:
                for target, address in spec_parser.parse(spec):
                  if target:
                    self.targets.append(target)
                    # Force early BUILD file loading if this target is an alias that expands
                    # to others.
                    unused = list(target.resolve())
                  else:
                    # No target at this address: suggest siblings from the same BUILD file.
                    siblings = Target.get_all_addresses(address.buildfile)
                    prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
                    error('%s => %s?:\n  %s' % (address, prompt,
                                                '\n  '.join(str(a) for a in siblings)))
              except (TypeError, ImportError, TaskError, GoalError):
                error(spec, include_traceback=True)
              except (IOError, SyntaxError, TargetDefinitionException):
                error(spec)

      self.phases = [Phase(goal) for goal in goals]

      rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
      if rcfiles:
        rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

        # Break down the goals specified on the command line to the full set that will be run so we
        # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
        # register the task class hierarchy fully qualified names so we can apply defaults to
        # baseclasses.
        sections = OrderedSet()
        for phase in Engine.execution_order(self.phases):
          for goal in phase.goals():
            sections.add(goal.name)
            for clazz in goal.task_type.mro():
              if clazz == Task:
                break
              sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

        augmented_args = rcfile.apply_defaults(sections, args)
        if augmented_args != args:
          # Replace args in place so the parser (and caller) sees the expansion.
          del args[:]
          args.extend(augmented_args)
          sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" % ' '.join(augmented_args))

      Phase.setup_parser(parser, args, self.phases)

  def run(self, lock):
    """Executes the parsed phases against self.targets; returns a process exit code."""
    # TODO(John Sirois): Consider moving to straight python logging.  The divide between the
    # context/work-unit logging and standard python logging doesn't buy us anything.

    # Enable standard python logging for code with no handle to a context/work-unit.
    if self.options.log_level:
      LogOptions.set_stderr_log_level((self.options.log_level or 'info').upper())
      logdir = self.options.logdir or self.config.get('goals', 'logdir', default=None)
      if logdir:
        safe_mkdir(logdir)
        LogOptions.set_log_dir(logdir)
        log.init('goals')
      else:
        log.init()

    # Update the reporting settings, now that we have flags etc.
    def is_console_task():
      for phase in self.phases:
        for goal in phase.goals():
          if issubclass(goal.task_type, ConsoleTask):
            return True
      return False

    is_explain = self.options.explain
    update_reporting(self.options, is_console_task() or is_explain, self.run_tracker)

    if self.options.dry_run:
      print('****** Dry Run ******')

    context = Context(self.config, self.options, self.run_tracker, self.targets,
                      requested_goals=self.requested_goals, lock=lock)

    # Honor the deprecated --all-recursive / --all flags, with a warning.
    if self.options.recursive_directory:
      context.log.warn('--all-recursive is deprecated, use a target spec with the form [dir]:: instead')
      for dir in self.options.recursive_directory:
        self.add_target_recursive(dir)

    if self.options.target_directory:
      context.log.warn('--all is deprecated, use a target spec with the form [dir]: instead')
      for dir in self.options.target_directory:
        self.add_target_directory(dir)

    unknown = []
    for phase in self.phases:
      if not phase.goals():
        unknown.append(phase)

    if unknown:
      _list_goals(context, 'Unknown goal(s): %s' % ' '.join(phase.name for phase in unknown))
      return 1

    return Goal._execute(context, self.phases, print_timing=self.options.time)

  def cleanup(self):
    """Best-effort cleanup on interrupt; kills nailguns then exits with failure."""
    # TODO: Make this more selective? Only kill nailguns that affect state? E.g., checkstyle
    # may not need to be killed.
    NailgunTask.killall(log.info)
    sys.exit(1)
# NOTE(review): this function appears to be a verbatim duplicate of Goal.setup_parser
# above — likely a copy/merge artifact. Its enclosing scope is not visible here;
# confirm whether it belongs to another class and, if truly redundant, remove one copy.
def setup_parser(self, parser, args):
  """Parses goals/specs from args, bootstraps BUILD files, and wires up the option parser.

  Mutates args in place when pantsrc defaults are applied, and populates
  self.config, self.requested_goals, self.targets and self.phases.
  """
  self.config = Config.load()
  Goal.add_global_options(parser)

  # We support attempting zero or more goals.  Multiple goals must be delimited from further
  # options and non goal args with a '--'.  The key permutations we need to support:
  # ./pants goal => goals
  # ./pants goal goals => goals
  # ./pants goal compile src/java/... => compile
  # ./pants goal compile -x src/java/... => compile
  # ./pants goal compile src/java/... -x => compile
  # ./pants goal compile run -- src/java/... => compile, run
  # ./pants goal compile run -- src/java/... -x => compile, run
  # ./pants goal compile run -- -x src/java/... => compile, run
  if not args:
    args.append('goals')

  if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
    def format_usage(usages):
      # Align the right-hand descriptions one column past the longest usage string.
      left_colwidth = 0
      for left, right in usages:
        left_colwidth = max(left_colwidth, len(left))
      lines = []
      for left, right in usages:
        lines.append('  %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
      return '\n'.join(lines)

    usages = [
      ("%prog goal goals ([spec]...)", Phase('goals').description),
      ("%prog goal help [goal] ([spec]...)", Phase('help').description),
      ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
      ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
    ]
    parser.set_usage("\n%s" % format_usage(usages))
    parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                     "attempts to achieve the specified goal for the listed targets." """
                     Note that target specs accept two special forms:
                       [dir]:  to include all targets in the specified directory
                       [dir]:: to include all targets found in all BUILD files recursively under
                               the directory""")

    parser.print_help()
    sys.exit(0)
  else:
    goals, specs = Goal.parse_args(args)
    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
      # Bootstrap goals by loading any configured bootstrap BUILD files
      with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
        with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
          for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
            try:
              buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
              ParseContext(buildfile).parse()
            except (TypeError, ImportError, TaskError, GoalError):
              error(path, include_traceback=True)
            except (IOError, SyntaxError):
              error(path)
      # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
      self.run_tracker.run_info.add_scm_info()

      # Bootstrap user goals by loading any BUILD files implied by targets.
      spec_parser = SpecParser(self.root_dir)
      with self.check_errors('The following targets could not be loaded:') as error:
        with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
          for spec in specs:
            try:
              for target, address in spec_parser.parse(spec):
                if target:
                  self.targets.append(target)
                  # Force early BUILD file loading if this target is an alias that expands
                  # to others.
                  unused = list(target.resolve())
                else:
                  # No target at this address: suggest siblings from the same BUILD file.
                  siblings = Target.get_all_addresses(address.buildfile)
                  prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
                  error('%s => %s?:\n  %s' % (address, prompt,
                                              '\n  '.join(str(a) for a in siblings)))
            except (TypeError, ImportError, TaskError, GoalError):
              error(spec, include_traceback=True)
            except (IOError, SyntaxError, TargetDefinitionException):
              error(spec)

    self.phases = [Phase(goal) for goal in goals]

    rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
    if rcfiles:
      rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

      # Break down the goals specified on the command line to the full set that will be run so we
      # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
      # register the task class hierarchy fully qualified names so we can apply defaults to
      # baseclasses.
      sections = OrderedSet()
      for phase in Engine.execution_order(self.phases):
        for goal in phase.goals():
          sections.add(goal.name)
          for clazz in goal.task_type.mro():
            if clazz == Task:
              break
            sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

      augmented_args = rcfile.apply_defaults(sections, args)
      if augmented_args != args:
        # Replace args in place so the parser (and caller) sees the expansion.
        del args[:]
        args.extend(augmented_args)
        sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" % ' '.join(augmented_args))

    Phase.setup_parser(parser, args, self.phases)