def setUpClass(cls):
  """Ensure that all code has a config to read from the cache.

  TODO: Yuck. Get rid of this after plumbing options through in the right places.
  """
  super(BaseTest, cls).setUpClass()
  # Pre-populate the global Config cache so code under test that calls
  # Config.from_cache() finds a loaded config instead of failing.
  Config.cache(Config.load())
def __init__(self, *args, **kwargs):
  """Reads the scalastyle config (required) and excludes file (optional) from pants.ini.

  Raises Config.ConfigError if a configured file does not exist on disk.
  """
  super(Scalastyle, self).__init__(*args, **kwargs)

  # The scalastyle XML config is mandatory and must point at an existing file.
  self._scalastyle_config = self.context.config.get_required(self._CONFIG_SECTION, 'config')
  if not os.path.exists(self._scalastyle_config):
    raise Config.ConfigError('Scalastyle config file does not exist: %s' % self._scalastyle_config)

  # The excludes file is optional; when present, each line is a regex pattern.
  self._excludes = set()
  excludes_file = self.context.config.get(self._CONFIG_SECTION, 'excludes')
  if excludes_file:
    if not os.path.exists(excludes_file):
      raise Config.ConfigError('Scalastyle excludes file does not exist: %s' % excludes_file)
    self.context.log.debug('Using scalastyle excludes file %s' % excludes_file)
    with open(excludes_file) as excludes_fh:
      for line in excludes_fh.readlines():
        self._excludes.add(re.compile(line.strip()))

  # Register the scalastyle jar so the bootstrap machinery can resolve it.
  self._scalastyle_bootstrap_key = 'scalastyle'
  self.register_jvm_tool(self._scalastyle_bootstrap_key, [':scalastyle'])
def get_bootstrap_options(self):
  """Returns an Options instance that only knows about the bootstrap options.

  Memoized: the Options instance is computed once and cached on self._bootstrap_options.
  As a side effect, re-reads config post-bootstrap and caches it globally via Config.cache.
  """
  if not self._bootstrap_options:
    # First pass: register the bootstrap options against a capture function just to
    # learn the full set of flag names (including --no- inverses).
    flags = set()
    def capture_the_flags(*args, **kwargs):
      for flag in Parser.expand_flags(*args, **kwargs):
        flags.add(flag.name)
        if flag.inverse_name:
          flags.add(flag.inverse_name)
    register_bootstrap_options(capture_the_flags, buildroot=self._buildroot)
    # Take just the bootstrap args, so we don't choke on other global-scope args on the cmd line.
    bargs = filter(lambda x: x.partition('=')[0] in flags, self._args or [])
    # Second pass: register for real against an Options instance fed only bootstrap args.
    self._bootstrap_options = Options(env=self._env, config=self._pre_bootstrap_config,
                                      known_scopes=[GLOBAL_SCOPE], args=bargs)
    register_bootstrap_options(self._bootstrap_options.register_global,
                               buildroot=self._buildroot)
    bootstrap_option_values = self._bootstrap_options.for_global_scope()
    # Seed Config defaults from the bootstrapped values (global mutable state).
    Config.reset_default_bootstrap_option_values(values=bootstrap_option_values)
    # Now re-read the config, post-bootstrapping. Note the order: First whatever we bootstrapped
    # from (typically pants.ini), then config override, then rcfiles.
    configpaths = list(self._pre_bootstrap_config.sources())
    if bootstrap_option_values.config_override:
      configpaths.append(bootstrap_option_values.config_override)
    if bootstrap_option_values.pantsrc:
      rcfiles = [os.path.expanduser(rcfile) for rcfile in bootstrap_option_values.pantsrc_files]
      # Silently skip rcfiles that don't exist; they are optional by nature.
      existing_rcfiles = filter(os.path.exists, rcfiles)
      configpaths.extend(existing_rcfiles)
    self._post_bootstrap_config = Config.load(configpaths)
    Config.cache(self._post_bootstrap_config)
  return self._bootstrap_options
def setUp(self):
  """Builds a small synthetic repo: a 3rdparty jar, a scala lib, and two java libs.

  java:explicit_scala_dep depends on scala:lib; java:no_scala_dep does not — the
  pair exercises scala_library's java_sources dependency injection.
  """
  super(ScalaLibraryTest, self).setUp()
  self.create_file(
      'pants.ini',
      dedent('''
          [compile.scala]
          runtime-deps: []
          '''))
  # TODO: Required because target code has no direct config reference. Remove after fixing that.
  Config.cache(Config.load())

  self.add_to_build_file(
      '3rdparty',
      dedent('''
          jar_library(
            name='hub-and-spoke',
            jars=[
              jar('org.jalopy', 'hub-and-spoke', '0.0.1')
            ]
          )
          '''))

  self.add_to_build_file(
      'scala',
      dedent('''
          scala_library(
            name='lib',
            sources=[],
            java_sources=[
              'java:explicit_scala_dep',
              'java:no_scala_dep',
            ]
          )
          '''))

  self.add_to_build_file(
      'java',
      dedent('''
          java_library(
            name='explicit_scala_dep',
            sources=[],
            dependencies=[
              'scala:lib',
              '3rdparty:hub-and-spoke',
            ]
          )

          java_library(
            name='no_scala_dep',
            sources=[],
            dependencies=[]
          )
          '''))

  # Resolve the targets up front so individual tests can assert on them directly.
  self.lib_hub_and_spoke = self.target('3rdparty:hub-and-spoke')
  self.scala_library = self.target('scala:lib')
  self.java_library_explicit_dep = self.target('java:explicit_scala_dep')
  self.java_library_no_dep = self.target('java:no_scala_dep')
def _convert(val, acceptable_types): """Ensure that val is one of the acceptable types, converting it if needed. :param val: The value we're parsing. :param acceptable_types: A tuple of expected types for val. :returns: The parsed value """ if isinstance(val, acceptable_types): return val try: parsed_value = eval(val, {}, {}) except Exception as e: raise _parse_error( val, 'Value cannot be evaluated as an expression: ' '{msg}\n{value}\nAcceptable types: ' '{expected}'.format(msg=e, value=Config.format_raw_value(val), expected=format_type_tuple(acceptable_types))) if not isinstance(parsed_value, acceptable_types): raise _parse_error( val, 'Value is not of the acceptable types: ' '{msg}\n{' 'value}'.format(msg=format_type_tuple(acceptable_types), value=Config.format_raw_value(val))) return parsed_value
def get_bootstrap_options(self):
  """Returns an Options instance that only knows about the bootstrap options.

  Memoized on self._bootstrap_options. Side effect: re-reads config post-bootstrap
  and caches it globally via Config.cache.
  """
  if not self._bootstrap_options:
    # First pass: collect the flag strings exactly as registered. Unlike other
    # variants of this method, there is no Parser.expand_flags call here, so
    # inverse (--no-) forms are not captured.
    flags = set()
    def capture_the_flags(*args, **kwargs):
      flags.update(args)
    register_bootstrap_options(capture_the_flags, buildroot=self._buildroot)
    # Take just the bootstrap args, so we don't choke on other global-scope args on the cmd line.
    bargs = filter(lambda x: x.partition('=')[0] in flags, self._args or [])
    # Second pass: real registration against the Options instance.
    self._bootstrap_options = Options(env=self._env, config=self._pre_bootstrap_config,
                                      known_scopes=[GLOBAL_SCOPE], args=bargs)
    register_bootstrap_options(self._bootstrap_options.register_global,
                               buildroot=self._buildroot)
    bootstrap_option_values = self._bootstrap_options.for_global_scope()
    # Seed Config defaults from the bootstrapped values (global mutable state).
    Config.reset_default_bootstrap_option_values(values=bootstrap_option_values)
    # Now re-read the config, post-bootstrapping. Note the order: First whatever we bootstrapped
    # from (typically pants.ini), then config override, then rcfiles.
    configpaths = list(self._pre_bootstrap_config.sources())
    if bootstrap_option_values.config_override:
      configpaths.append(bootstrap_option_values.config_override)
    if bootstrap_option_values.pantsrc:
      rcfiles = [os.path.expanduser(rcfile) for rcfile in bootstrap_option_values.pantsrc_files]
      # Missing rcfiles are silently skipped; they are optional.
      existing_rcfiles = filter(os.path.exists, rcfiles)
      configpaths.extend(existing_rcfiles)
    self._post_bootstrap_config = Config.load(configpaths)
    Config.cache(self._post_bootstrap_config)
  return self._bootstrap_options
def setUp(self):
  """Sets up an isolated build root with the real repo's tool BUILD files linked in.

  Order matters: the real pants.ini config must be read before super().setUp()
  switches the build root to a temporary directory.
  """
  # Ensure we get a read of the real pants.ini config
  Config.reset_default_bootstrap_option_values()
  real_config = Config.from_cache()

  super(JvmToolTaskTestBase, self).setUp()

  # Use a synthetic subclass for bootstrapping within the test, to isolate this from
  # any bootstrapping the pants run executing the test might need.
  self.bootstrap_task_type, bootstrap_scope = self.synthesize_task_subtype(BootstrapJvmTools)
  JvmToolMixin.reset_registered_tools()

  # Cap BootstrapJvmTools memory usage in tests. The Xmx was empirically arrived upon using
  # -Xloggc and verifying no full gcs for a test using the full gamut of resolving a multi-jar
  # tool, constructing a fat jar and then shading that fat jar.
  self.set_options_for_scope(bootstrap_scope, jvm_options=['-Xmx128m'])

  def link_or_copy(src, dest):
    # Hard-link for speed; fall back to copying across filesystem boundaries (EXDEV).
    try:
      os.link(src, dest)
    except OSError as e:
      if e.errno == errno.EXDEV:
        shutil.copy(src, dest)
      else:
        raise e

  def link(path, optional=False, force=False):
    # Link a single file from the real build root into the test build root.
    # With force=True, an existing destination (EEXIST) is replaced.
    src = os.path.join(self.real_build_root, path)
    if not optional or os.path.exists(src):
      dest = os.path.join(self.build_root, path)
      safe_mkdir(os.path.dirname(dest))
      try:
        link_or_copy(src, dest)
      except OSError as e:
        if force and e.errno == errno.EEXIST:
          os.unlink(dest)
          link_or_copy(src, dest)
        else:
          raise e
      return dest

  def link_tree(path, optional=False, force=False):
    # Recursively link every file under path, preserving relative layout.
    src = os.path.join(self.real_build_root, path)
    if not optional or os.path.exists(src):
      for abspath, dirs, files in safe_walk(src):
        for f in files:
          link(os.path.relpath(os.path.join(abspath, f), self.real_build_root), force=force)

  # TODO(John Sirois): Find a way to do this cleanly
  link('pants.ini', force=True)

  # TODO(pl): Note that this pulls in a big chunk of the hairball to every test that
  # depends on it, because BUILD contains source_roots that specify a variety of types
  # from different backends.
  link('BUILD', force=True)
  link('BUILD.tools', force=True)
  support_dir = real_config.getdefault('pants_supportdir')
  link_tree(os.path.relpath(os.path.join(support_dir, 'ivy'), self.real_build_root), force=True)
def setUp(self):
  """Sets up an isolated build root with the real repo's tool BUILD files linked in.

  Variant of JvmToolTaskTestBase.setUp that reads config via self.config() and
  registers no extra bootstrap jvm_options.
  """
  # Ensure we get a read of the real pants.ini config
  Config.reset_default_bootstrap_option_values()
  real_config = self.config()

  super(JvmToolTaskTestBase, self).setUp()

  # Use a synthetic subclass for bootstrapping within the test, to isolate this from
  # any bootstrapping the pants run executing the test might need.
  self.bootstrap_task_type, bootstrap_scope = self.synthesize_task_subtype(BootstrapJvmTools)
  # TODO: We assume that no added jvm_options are necessary to bootstrap successfully in a test.
  # This may not be true forever. But getting the 'real' value here is tricky, as we have no
  # access to the enclosing pants run's options here.
  self.set_options_for_scope(bootstrap_scope, jvm_options=[])
  JvmToolTaskMixin.reset_registered_tools()

  def link_or_copy(src, dest):
    # Hard-link for speed; fall back to copying across filesystem boundaries (EXDEV).
    try:
      os.link(src, dest)
    except OSError as e:
      if e.errno == errno.EXDEV:
        shutil.copy(src, dest)
      else:
        raise e

  def link(path, optional=False, force=False):
    # Link a single file from the real build root into the test build root.
    # With force=True, an existing destination (EEXIST) is replaced.
    src = os.path.join(self.real_build_root, path)
    if not optional or os.path.exists(src):
      dest = os.path.join(self.build_root, path)
      safe_mkdir(os.path.dirname(dest))
      try:
        link_or_copy(src, dest)
      except OSError as e:
        if force and e.errno == errno.EEXIST:
          os.unlink(dest)
          link_or_copy(src, dest)
        else:
          raise e
      return dest

  def link_tree(path, optional=False, force=False):
    # Recursively link every file under path, preserving relative layout.
    src = os.path.join(self.real_build_root, path)
    if not optional or os.path.exists(src):
      for abspath, dirs, files in safe_walk(src):
        for f in files:
          link(os.path.relpath(os.path.join(abspath, f), self.real_build_root), force=force)

  # TODO(John Sirois): Find a way to do this cleanly
  link('pants.ini', force=True)

  # TODO(pl): Note that this pulls in a big chunk of the hairball to every test that
  # depends on it, because BUILD contains source_roots that specify a variety of types
  # from different backends.
  link('BUILD', force=True)
  link('BUILD.tools', force=True)
  support_dir = real_config.getdefault('pants_supportdir')
  link_tree(os.path.relpath(os.path.join(support_dir, 'ivy'), self.real_build_root), force=True)
def config(self, overrides=''):
  """Returns a config valid for the test build root.

  When overrides are supplied they are written to a temp file and injected via
  the PANTS_CONFIG_OVERRIDE environment variable before loading.
  """
  # No overrides: just load the default config.
  if not overrides:
    return Config.load()
  with temporary_file() as fp:
    fp.write(overrides)
    fp.close()
    with environment_as(PANTS_CONFIG_OVERRIDE=fp.name):
      return Config.load()
def config(self, overrides=''):
  """Returns a config valid for the test build root.

  Loads pants.ini from the build root, plus an optional overrides file layered
  on top (written to a non-cleaned-up temp file so Config.load can read it).
  """
  ini_file = os.path.join(get_buildroot(), 'pants.ini')
  if not overrides:
    return Config.load([ini_file])
  # cleanup=False keeps the temp file alive for Config.load to read after close.
  with temporary_file(cleanup=False) as fp:
    fp.write(overrides)
    fp.close()
    return Config.load([ini_file, fp.name])
def bootstrap_option_values():
  """Returns the bootstrap option values for doc display, undoing global contamination."""
  try:
    bootstrapper = OptionsBootstrapper(buildroot='<buildroot>')
    return bootstrapper.get_bootstrap_options().for_global_scope()
  finally:
    # Today, the OptionsBootstrapper mutates global state upon construction in the form of:
    #   Config.reset_default_bootstrap_option_values(...)
    # As such bootstrap options that use the buildroot get contaminated globally here. We only
    # need the contaminated values locally though for doc display, thus the reset of global state.
    # TODO(John Sirois): remove this hack when mutable Config._defaults is killed.
    Config.reset_default_bootstrap_option_values()
def test_depmap_jar_path(self):
  """Checks that depmap project info reports the ivy-cache path of a 3rdparty jar."""
  with temporary_dir(root_dir=self.workdir_root()) as workdir:
    test_target = 'examples/tests/java/com/pants/examples/usethrift:usethrift'
    json_data = self.run_depmap_project_info(test_target, workdir)
    # Hack because Bootstrapper.instance() reads config from cache. Will go away after we plumb
    # options into IvyUtil properly.
    Config.cache(Config.load())
    ivy_cache_dir = Bootstrapper.instance().ivy_cache_dir
    self.assertEquals(json_data.get('libraries').get('commons-lang:commons-lang:2.5'),
                      [os.path.join(ivy_cache_dir,
                                    'commons-lang/commons-lang/jars/commons-lang-2.5.jar')])
def __init__(self, env=None, configpath=None, args=None, buildroot=None):
  """Sets up pre-bootstrap state.

  :param env: Environment dict to read from; defaults to a copy of os.environ.
  :param configpath: Optional explicit config file path; defaults to Config.load's default.
  :param args: Command-line args; defaults to sys.argv.
  :param buildroot: Build root dir; defaults to get_buildroot().
  """
  self._buildroot = buildroot or get_buildroot()
  self._env = env or os.environ.copy()
  # NOTE: mutates global Config defaults as a construction side effect.
  Config.reset_default_bootstrap_option_values(buildroot=self._buildroot)
  self._pre_bootstrap_config = Config.load([configpath] if configpath else None)
  self._post_bootstrap_config = None  # Will be set later.
  self._args = args or sys.argv
  self._bootstrap_options = None  # We memoize the bootstrap options here.
  self._full_options = None  # We memoize the full options here.
  # So other startup code has config to work with. This will go away once we replace direct
  # config accesses with options, and plumb those through everywhere that needs them.
  Config.cache(self._pre_bootstrap_config)
def get_bootstrap_options(self):
  """:returns: an Options instance that only knows about the bootstrap options.
  :rtype: Options

  Memoized on self._bootstrap_options. Side effect: re-reads config post-bootstrap
  and caches it globally via Config.cache.
  """
  if not self._bootstrap_options:
    # First pass: register against a capture function to learn all flag names,
    # tracking 2-char short flags separately (matched by prefix below).
    flags = set()
    short_flags = set()

    def capture_the_flags(*args, **kwargs):
      for flag in Parser.expand_flags(*args, **kwargs):
        flags.add(flag.name)
        if len(flag.name) == 2:
          short_flags.add(flag.name)
        if flag.inverse_name:
          flags.add(flag.inverse_name)

    register_bootstrap_options(capture_the_flags, buildroot=self._buildroot)

    def is_bootstrap_option(arg):
      # Long flags match exactly on the part before '='; short flags match by prefix
      # (e.g. '-ldebug' matches '-l').
      components = arg.split('=', 1)
      if components[0] in flags:
        return True
      for flag in short_flags:
        if arg.startswith(flag):
          return True
      return False

    # Take just the bootstrap args, so we don't choke on other global-scope args on the cmd line.
    # Stop before '--' since args after that are pass-through and may have duplicate names to our
    # bootstrap options.
    bargs = filter(is_bootstrap_option, itertools.takewhile(lambda arg: arg != '--', self._args))

    self._bootstrap_options = Options(env=self._env, config=self._pre_bootstrap_config,
                                      known_scopes=[GLOBAL_SCOPE], args=bargs)
    register_bootstrap_options(self._bootstrap_options.register_global,
                               buildroot=self._buildroot)

    bootstrap_option_values = self._bootstrap_options.for_global_scope()
    # Seed Config defaults from the bootstrapped values (global mutable state).
    Config.reset_default_bootstrap_option_values(values=bootstrap_option_values)

    # Now re-read the config, post-bootstrapping. Note the order: First whatever we bootstrapped
    # from (typically pants.ini), then config override, then rcfiles.
    configpaths = list(self._pre_bootstrap_config.sources())
    if bootstrap_option_values.config_override:
      configpaths.append(bootstrap_option_values.config_override)
    if bootstrap_option_values.pantsrc:
      rcfiles = [os.path.expanduser(rcfile) for rcfile in bootstrap_option_values.pantsrc_files]
      # Missing rcfiles are silently skipped; they are optional.
      existing_rcfiles = filter(os.path.exists, rcfiles)
      configpaths.extend(existing_rcfiles)
    self._post_bootstrap_config = Config.load(configpaths)
    Config.cache(self._post_bootstrap_config)
  return self._bootstrap_options
def setUp(self):
  """Builds a small synthetic repo: a 3rdparty jar, a scala lib, and two java libs.

  java:explicit_scala_dep depends on scala:lib; java:no_scala_dep does not — the
  pair exercises scala_library's java_sources dependency injection.
  """
  super(ScalaLibraryTest, self).setUp()
  self.create_file('pants.ini', dedent('''
      [compile.scala]
      runtime-deps: []
      '''))
  # TODO: Required because target code has no direct config reference. Remove after fixing that.
  Config.cache(Config.load())

  self.add_to_build_file('3rdparty', dedent('''
      jar_library(
        name='hub-and-spoke',
        jars=[
          jar('org.jalopy', 'hub-and-spoke', '0.0.1')
        ]
      )
      '''))

  self.add_to_build_file('scala', dedent('''
      scala_library(
        name='lib',
        sources=[],
        java_sources=[
          'java:explicit_scala_dep',
          'java:no_scala_dep',
        ]
      )
      '''))

  self.add_to_build_file('java', dedent('''
      java_library(
        name='explicit_scala_dep',
        sources=[],
        dependencies=[
          'scala:lib',
          '3rdparty:hub-and-spoke',
        ]
      )

      java_library(
        name='no_scala_dep',
        sources=[],
        dependencies=[]
      )
      '''))

  # Resolve the targets up front so individual tests can assert on them directly.
  self.lib_hub_and_spoke = self.target('3rdparty:hub-and-spoke')
  self.scala_library = self.target('scala:lib')
  self.java_library_explicit_dep = self.target('java:explicit_scala_dep')
  self.java_library_no_dep = self.target('java:no_scala_dep')
def _initialize_config(self): scalastyle_config = self.context.config.get( self._CONFIG_SECTION, self._CONFIG_SECTION_CONFIG_OPTION) # Scalastyle task by default isn't wired up in pants, but if it is installed # via plugin, then the config file setting is required. if not scalastyle_config: raise Config.ConfigError( 'Scalastyle config is missing from section[{section}] option[{setting}] in ' 'pants.ini.'.format( section=self._CONFIG_SECTION, setting=self._CONFIG_SECTION_CONFIG_OPTION)) # And the config setting value must be a valid file. if not os.path.exists(scalastyle_config): raise Config.ConfigError( 'Scalastyle config file specified in section[{section}] option[{setting}] in pants.ini ' 'does not exist: {file}'.format( section=self._CONFIG_SECTION, setting=self._CONFIG_SECTION_CONFIG_OPTION, file=scalastyle_config)) excludes_file = self.context.config.get( self._CONFIG_SECTION, self._CONFIG_SECTION_EXCLUDES_OPTION) scalastyle_excludes = set() if excludes_file: # excludes setting is optional, but if specified, must be a valid file. if not os.path.exists(excludes_file): raise Config.ConfigError( 'Scalastyle excludes file specified in section[{section}] option[{setting}] in ' 'pants.ini does not exist: {file}'.format( section=self._CONFIG_SECTION, setting=self._CONFIG_SECTION_EXCLUDES_OPTION, file=excludes_file)) with open(excludes_file) as fh: for pattern in fh.readlines(): scalastyle_excludes.add(re.compile(pattern.strip())) self.context.log.debug( 'Scalastyle file exclude pattern: {pattern}'.format( pattern=pattern)) else: # excludes setting is optional. self.context.log.debug( 'Unable to get section[{section}] option[{setting}] value in pants.ini. ' 'All scala sources will be checked.'.format( section=self._CONFIG_SECTION, setting=self._CONFIG_SECTION_EXCLUDES_OPTION)) # Only transfer to local variables to the state at the end to minimize side effects. self._scalastyle_config = scalastyle_config or None self._scalastyle_excludes = scalastyle_excludes or None
def create_config(sample_ini='', defaults=None):
  """Creates a ``Config`` from the ``sample_ini`` file contents.

  :param string sample_ini: The contents of the ini file containing the config values.
  :param dict defaults: An optional dict of global default ini values to seed.
  :raises ValueError: If sample_ini is not a string.
  """
  if not isinstance(sample_ini, Compatibility.string):
    raise ValueError('The sample_ini supplied must be a string, given: %s' % sample_ini)

  ini_parser = Config.create_parser(defaults)
  # Feed the ini contents to the parser via an in-memory byte stream.
  with io.BytesIO(sample_ini.encode('utf-8')) as ini_buffer:
    ini_parser.readfp(ini_buffer)
  return Config(ini_parser)
def _convert(val, acceptable_types): """Ensure that val is one of the acceptable types, converting it if needed.""" if isinstance(val, acceptable_types): return val try: parsed_value = eval(val, {}, {}) except Exception as e: raise _parse_error(val, 'Value cannot be evaluated: {msg}\n{value}'.format( msg=e.message, value=Config.format_raw_value(val))) if not isinstance(parsed_value, acceptable_types): raise _parse_error(val, 'Value is not of the acceptable types: {msg}\n{value}'.format( msg=acceptable_types, value=Config.format_raw_value(val))) return parsed_value
def get_bootstrap_options(self):
  """Returns an Options instance that only knows about the bootstrap options.

  Memoized on self._bootstrap_options. Side effect: re-reads config post-bootstrap
  and caches it globally via Config.cache.
  """
  if not self._bootstrap_options:
    # First pass: register against a capture function to learn all flag names,
    # tracking 2-char short flags separately (matched by prefix below).
    flags = set()
    short_flags = set()

    def capture_the_flags(*args, **kwargs):
      for flag in Parser.expand_flags(*args, **kwargs):
        flags.add(flag.name)
        if len(flag.name) == 2:
          short_flags.add(flag.name)
        if flag.inverse_name:
          flags.add(flag.inverse_name)

    register_bootstrap_options(capture_the_flags, buildroot=self._buildroot)

    def is_bootstrap_option(arg):
      # Long flags match exactly on the part before '='; short flags match by prefix.
      components = arg.split('=', 1)
      if components[0] in flags:
        return True
      for flag in short_flags:
        if arg.startswith(flag):
          return True
      return False

    # Take just the bootstrap args, so we don't choke on other global-scope args on the cmd line.
    # Stop before '--' since args after that are pass-through and may have duplicate names to our
    # bootstrap options.
    bargs = filter(is_bootstrap_option, itertools.takewhile(lambda arg: arg != '--', self._args))

    self._bootstrap_options = Options(env=self._env, config=self._pre_bootstrap_config,
                                      known_scopes=[GLOBAL_SCOPE], args=bargs)
    register_bootstrap_options(self._bootstrap_options.register_global,
                               buildroot=self._buildroot)

    bootstrap_option_values = self._bootstrap_options.for_global_scope()
    # Seed Config defaults from the bootstrapped values (global mutable state).
    Config.reset_default_bootstrap_option_values(values=bootstrap_option_values)

    # Now re-read the config, post-bootstrapping. Note the order: First whatever we bootstrapped
    # from (typically pants.ini), then config override, then rcfiles.
    configpaths = list(self._pre_bootstrap_config.sources())
    if bootstrap_option_values.config_override:
      configpaths.append(bootstrap_option_values.config_override)
    if bootstrap_option_values.pantsrc:
      rcfiles = [os.path.expanduser(rcfile) for rcfile in bootstrap_option_values.pantsrc_files]
      # Missing rcfiles are silently skipped; they are optional.
      existing_rcfiles = filter(os.path.exists, rcfiles)
      configpaths.extend(existing_rcfiles)
    self._post_bootstrap_config = Config.load(configpaths)
    Config.cache(self._post_bootstrap_config)
  return self._bootstrap_options
def DISABLED_test_gen_tasks_options_reference_data(self):
  """Smoke test: the options-reference reflection code runs and produces data.

  Disabled (DISABLED_ prefix); also checks that generating the reference does not
  poison the cached Config (pants_supportdir must be unchanged afterwards).
  """
  # TODO(Eric Ayers) Not really part of the test, just to detect the cache poisoning
  before_support_dir = Config.from_cache().getdefault('pants_supportdir')

  # can we run our reflection-y goal code without crashing? would be nice
  Goal.by_name('jack').install(TaskRegistrar('jill', DummyTask))
  oref_data = reflect.gen_tasks_options_reference_data()

  # TODO(Eric Ayers) Not really part of the test, just to detect the cache poisoning
  after_support_dir = Config.from_cache().getdefault('pants_supportdir')
  self.assertEquals(before_support_dir, after_support_dir)

  self.assertTrue(len(oref_data) > 0,
                  'Tried to generate data for options reference, got emptiness')
def _convert(val, acceptable_types): """Ensure that val is one of the acceptable types, converting it if needed.""" if isinstance(val, acceptable_types): return val try: parsed_value = eval(val, {}, {}) except Exception as e: raise _parse_error( val, 'Value cannot be evaluated: {msg}\n{value}'.format( msg=e.message, value=Config.format_raw_value(val))) if not isinstance(parsed_value, acceptable_types): raise _parse_error( val, 'Value is not of the acceptable types: {msg}\n{value}'.format( msg=acceptable_types, value=Config.format_raw_value(val))) return parsed_value
def resolve(cls, config_file):
  """Parse a keystore config file and return a list of Keystore objects."""
  config = Config.create_parser()
  try:
    with open(config_file, 'rb') as keystore_config:
      config.readfp(keystore_config)
  except IOError:
    # NOTE(review): the message interpolates cls._CONFIG_SECTION as a flag name
    # ('--<section>'); confirm that is the intended command-line option name.
    raise KeystoreResolver.Error('The \'--{0}\' option must point at a valid .ini file holding '
                                 'keystore definitions.'.format(cls._CONFIG_SECTION))
  parser = SingleFileConfig(config_file, config)
  key_names = config.sections()
  keys = {}

  def create_key(key_name):
    """Instantiate Keystore objects."""
    # All five fields are required; get_required raises Config.ConfigError when missing.
    keystore = Keystore(keystore_name=key_name,
                        build_type=parser.get_required(key_name, 'build_type'),
                        keystore_location=parser.get_required(key_name, 'keystore_location'),
                        keystore_alias=parser.get_required(key_name, 'keystore_alias'),
                        keystore_password=parser.get_required(key_name, 'keystore_password'),
                        key_password=parser.get_required(key_name, 'key_password'))
    return keystore

  for name in key_names:
    try:
      keys[name] = create_key(name)
    except Config.ConfigError as e:
      # Re-wrap config errors in the resolver's own error type.
      raise KeystoreResolver.Error(e)
  # Returns a dict keyed by keystore name (despite the docstring saying list).
  return keys
def setUp(self):
  """Creates a fresh temporary build root with an empty pants.ini and parser/graph state."""
  self.build_root = mkdtemp(suffix='_BUILD_ROOT')
  BuildRoot().path = self.build_root
  # An empty pants.ini is enough for Config.load below to succeed.
  self.create_file('pants.ini')
  self.build_file_parser = make_default_build_file_parser(self.build_root)
  self.build_graph = BuildGraph()
  self.config = Config.load()
def __init__(self, *args, **kwargs):
  """Parses specs and build args, selects an interpreter, and closes over targets.

  Any parse/validation failure is reported via self.error (the command's
  error-and-exit path), which is why the bare excepts below are acceptable here.
  """
  super(Build, self).__init__(*args, **kwargs)

  if not self.args:
    self.error("A spec argument is required")

  self.config = Config.load()

  # Select exactly one matching interpreter from the user's constraints.
  interpreters = self.options.interpreters or [b'']
  self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
  self.interpreter_cache.setup(filters=interpreters)
  interpreters = self.interpreter_cache.select_interpreter(
      list(self.interpreter_cache.matches(interpreters)))
  if len(interpreters) != 1:
    self.error('Unable to detect suitable interpreter.')
  else:
    self.debug('Selected %s' % interpreters[0])
  self.interpreter = interpreters[0]

  # Split args at '--': everything before is target specs, everything after is
  # pass-through build args. Without '--', only the first arg is a spec.
  try:
    specs_end = self.args.index('--')
    if len(self.args) > specs_end:
      self.build_args = self.args[specs_end + 1:len(self.args) + 1]
    else:
      self.build_args = []
  except ValueError:
    specs_end = 1
    self.build_args = self.args[1:] if len(self.args) > 1 else []

  self.targets = OrderedSet()
  spec_parser = SpecParser(self.root_dir, self.build_file_parser)
  self.top_level_addresses = set()

  for spec in self.args[0:specs_end]:
    try:
      addresses = spec_parser.parse_addresses(spec)
    except:
      self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

    for address in addresses:
      self.top_level_addresses.add(address)
      try:
        self.build_file_parser.inject_address_closure_into_build_graph(address, self.build_graph)
        target = self.build_graph.get_target(address)
      except:
        self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

      if not target:
        self.error("Target %s does not exist" % address)

      # Pull in the full transitive closure of each top-level target.
      transitive_targets = self.build_graph.transitive_subgraph_of_addresses([target.address])
      for transitive_target in transitive_targets:
        self.targets.add(transitive_target)

  # Only python targets are buildable by this command.
  self.targets = [target for target in self.targets if target.is_python]
def setup_parser(self, parser, args):
  """Configures the optparse parser for the `build` command's flags and usage text."""
  parser.set_usage("\n"
                   "  %prog build (options) [spec] (build args)\n"
                   "  %prog build (options) [spec]... -- (build args)")
  parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                    default=Config.load().getdefault('connection_timeout'),
                    help="Number of seconds to wait for http connections.")
  parser.add_option('-i', '--interpreter', dest='interpreters', default=[], action='append',
                    help="Constrain what Python interpreters to use.  Uses Requirement "
                         "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
                         "By default, no constraints are used.  Multiple constraints may "
                         "be added.  They will be ORed together.")
  parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                    help='Show verbose output.')
  # Everything after '--' is treated as pass-through build args, not flags.
  parser.disable_interspersed_args()
  parser.epilog = ('Builds the specified Python target(s). Use ./pants goal for JVM and other '
                   'targets.')
def _maybe_emit_coverage_data(self, targets, chroot, pex, stdout, stderr):
  """Context-manager-style generator that yields extra pytest args for coverage.

  Coverage is driven by the PANTS_PY_COVERAGE env var ('modules:...' or 'paths:...').
  When unset, yields an empty arg list and does nothing else. Otherwise yields the
  coverage args and, on exit, combines raw data and emits console + html reports.
  """
  coverage = os.environ.get('PANTS_PY_COVERAGE')
  if coverage is None:
    yield []
    return

  def read_coverage_list(prefix):
    # Strip the 'modules:'/'paths:' prefix and split the comma-separated list.
    return coverage[len(prefix):].split(',')

  coverage_modules = None
  if coverage.startswith('modules:'):
    # NB: pytest-cov maps these modules to the `[run] sources` config.  So for
    # `modules:pants.base,pants.util` the config emitted has:
    # [run]
    # source =
    #   pants.base
    #   pants.util
    #
    # Now even though these are not paths, coverage sees the dots and switches to a module
    # prefix-matching mode.  Unfortunately, neither wildcards nor top-level module prefixes
    # like `pants.` serve to engage this module prefix-matching as one might hope.  It
    # appears that `pants.` is treated as a path and `pants.*` is treated as a literal
    # module prefix name.
    coverage_modules = read_coverage_list('modules:')
  elif coverage.startswith('paths:'):
    coverage_modules = []
    for path in read_coverage_list('paths:'):
      if not os.path.exists(path) and not os.path.isabs(path):
        # Look for the source in the PEX chroot since its not available from CWD.
        path = os.path.join(chroot, path)
      coverage_modules.append(path)

  with self._cov_setup(targets, chroot, coverage_modules=coverage_modules) as (args, coverage_rc):
    try:
      yield args
    finally:
      # Reporting runs even if the test run raised, via this finally block.
      with environment_as(PEX_MODULE='coverage.cmdline:main'):
        # Normalize .coverage.raw paths using combine and `paths` config in the rc file.
        # This swaps the /tmp pex chroot source paths for the local original source paths
        # the pex was generated from and which the user understands.
        shutil.move('.coverage', '.coverage.raw')
        pex.run(args=['combine', '--rcfile', coverage_rc], stdout=stdout, stderr=stderr)
        pex.run(args=['report', '-i', '--rcfile', coverage_rc], stdout=stdout, stderr=stderr)

        # TODO(wickman): If coverage is enabled and we are not using fast mode, write an
        # intermediate .html that points to each of the coverage reports generated and
        # webbrowser.open to that page.
        # TODO(John Sirois): Possibly apply the same logic to the console report.  In fact,
        # consider combining coverage files from all runs in this Tasks's execute and then
        # producing just 1 console and 1 html report whether or not the tests are run in fast
        # mode.
        relpath = Target.maybe_readable_identify(targets)
        pants_distdir = Config.from_cache().getdefault('pants_distdir')
        target_dir = os.path.join(pants_distdir, 'coverage', relpath)
        safe_mkdir(target_dir)
        pex.run(args=['html', '-i', '--rcfile', coverage_rc, '-d', target_dir],
                stdout=stdout, stderr=stderr)
def _create_config(self, config):
  """Writes the {section: {key: value}} dict to a temp ini file and loads it as a Config."""
  ini_path = os.path.join(safe_mkdtemp(), 'test_config.ini')
  # Render all sections up front, then write in one shot.
  lines = []
  for section, options in config.items():
    lines.append('[{}]\n'.format(section))
    for key, value in options.items():
      lines.append('{}: {}\n'.format(key, value))
  with open(ini_path, 'w') as fp:
    fp.writelines(lines)
  return Config.load(configpaths=[ini_path])
def select_binary_stream(base_path, version, name, config=None, url_opener=None):
  """Select a binary matching the current os and architecture.

  :param url_opener: Optional argument used only for testing, to 'pretend' to open urls.
  :returns: a 'stream' to download it from a support directory. The returned 'stream' is actually
    a lambda function which returns the files binary contents.
  :raises: :class:`pants.binary_util.BinaryUtil.BinaryNotFound` if no binary of the given version
    and name could not be found.
  """
  config = config or Config.load()
  baseurls = config.getdefault('pants_support_baseurls', type=list, default=[])
  if not baseurls:
    raise BinaryUtil.NoBaseUrlsError(
        'No urls are defined under pants_support_baseurls in the DEFAULT section of pants.ini.')
  timeout_secs = config.getdefault('pants_support_fetch_timeout_secs', type=int, default=30)
  binary_path = select_binary_base_path(base_path, version, name)
  if url_opener is None:
    url_opener = lambda u: closing(urllib_request.urlopen(u, timeout=timeout_secs))

  downloaded_successfully = False
  accumulated_errors = []
  # Try each base url in order; yield a reader for the first successful fetch.
  for baseurl in OrderedSet(baseurls):  # Wrap in OrderedSet because duplicates are wasteful.
    url = posixpath.join(baseurl, binary_path)
    log.info('Attempting to fetch {name} binary from: {url} ...'.format(name=name, url=url))
    try:
      with url_opener(url) as binary:
        log.info('Fetched {name} binary from: {url} .'.format(name=name, url=url))
        downloaded_successfully = True
        yield lambda: binary.read()
        break
    except (IOError, urllib_error.HTTPError, urllib_error.URLError, ValueError) as e:
      # Record the failure and fall through to the next base url.
      accumulated_errors.append('Failed to fetch binary from {url}: {error}'
                                .format(url=url, error=e))
  if not downloaded_successfully:
    raise BinaryUtil.BinaryNotFound((base_path, version, name), accumulated_errors)
def __init__(self, bootstrap_dir=None, baseurls=None, timeout=None, config=None,
             binary_base_path_strategy=None):
  """Creates a BinaryUtil with the given settings to define binary lookup behavior.

  Relevant settings may either be specified in the arguments, or will be loaded from the given
  config file.

  :param bootstrap_dir: Directory search for binaries in, or download binaries to if needed.
    Defaults to the value of 'pants_bootstrapdir' in config if unspecified.
  :param baseurls: List of url prefixes which represent repositories of binaries. Defaults to
    the value of 'pants_support_baseurls' in config if unspecified.
  :param timeout: Timeout in seconds for url reads. Defaults to the value of
    'pants_support_fetch_timeout_secs' in config if unspecified, or 30 seconds if that value
    isn't found in config.
  :param config: Config object to lookup parameters which are left unspecified as None. If
    config is left unspecified, it defaults to pants.ini via Config.from_cache().
  :param binary_base_path_strategy: Optional function to override default select_binary_base_path
    behavior. Takes in parameters (base_path, version, name) and returns a relative path to a
    binary. This relative path is used both for appending to the baseurl to determine the full
    url to the binary, and as the path to the subfolder the binary is stored in under the
    bootstrap_dir.
  """
  # Only touch config if at least one setting needs a config-derived default.
  if bootstrap_dir is None or baseurls is None or timeout is None:
    config = config or Config.from_cache()
    if bootstrap_dir is None:
      bootstrap_dir = config.getdefault('pants_bootstrapdir')
    if baseurls is None:
      baseurls = config.getdefault('pants_support_baseurls', type=list, default=[])
    if timeout is None:
      timeout = config.getdefault('pants_support_fetch_timeout_secs', type=int, default=30)
  bootstrap_dir = os.path.realpath(os.path.expanduser(bootstrap_dir))
  # NOTE(review): attribute name is missing a 't' ('_boostrap_dir'). Sibling methods
  # presumably read the same misspelled name, so renaming must be coordinated — TODO confirm.
  self._boostrap_dir = bootstrap_dir
  self._timeout = timeout
  self._baseurls = baseurls
  self._binary_base_path_strategy = binary_base_path_strategy
def resolve(cls, config_file):
  """Parse a keystore config file and return a dict mapping key names to Keystore objects.

  :param config_file: Path (may contain '~') to an ini-style file with one section per keystore.
    Every section must define build_type, keystore_location, keystore_alias, keystore_password
    and key_password.
  :returns: Dict of section name -> Keystore.
  :raises KeystoreResolver.Error: if the file cannot be read or a section is missing a required
    option.
  """
  config_file = os.path.expanduser(config_file)
  config = Config.create_parser()
  try:
    with open(config_file, 'rb') as keystore_config:
      config.readfp(keystore_config)
  except IOError as e:
    raise KeystoreResolver.Error(
        'Problem parsing config at {}: {}'.format(config_file, e))
  parser = SingleFileConfig(config_file, config)

  def create_key(key_name):
    """Instantiate a Keystore from the required options of one config section."""
    return Keystore(
        keystore_name=key_name,
        build_type=parser.get_required(key_name, 'build_type'),
        keystore_location=parser.get_required(key_name, 'keystore_location'),
        keystore_alias=parser.get_required(key_name, 'keystore_alias'),
        keystore_password=parser.get_required(key_name, 'keystore_password'),
        key_password=parser.get_required(key_name, 'key_password'))

  keys = {}
  for name in config.sections():
    try:
      keys[name] = create_key(name)
    except Config.ConfigError as e:
      # Re-wrap so callers only need to catch KeystoreResolver.Error.
      raise KeystoreResolver.Error(e)
  return keys
def check_config_file(path):
  """Checks a pants.ini-style config file for unmigrated option keys and unparseable values.

  Prints a colorized warning to stdout/stderr for every key found under an old name (per the
  module-level `migrations` map and the per-task subsystem maps below) and for every value that
  starts a JSON list/object but fails to parse.
  """
  cp = Config.create_parser()
  with open(path, 'r') as ini:
    cp.readfp(ini)
  print('Checking config file at {0} for unmigrated keys.'.format(path), file=sys.stderr)

  def section(s):
    # Render a section name in the cyan '[name]' form used in the warnings below.
    return cyan('[{0}]'.format(s))

  for src, dst in migrations.items():
    check_option(cp, src, dst)

  # Special-case handling of per-task subsystem options, so we can sweep them up in all
  # sections easily.

  def check_task_subsystem_options(subsystem_sec, options_map, sections=None):
    sections = sections or cp.sections()
    # 'DEFAULT' maps to the bare subsystem scope; any other section gets a task-qualified scope.
    for src_sec in ['DEFAULT'] + sections:
      dst_sec = subsystem_sec if src_sec == 'DEFAULT' else '{}.{}'.format(subsystem_sec, src_sec)
      for src_key, dst_key in options_map.items():
        check_option(cp, (src_sec, src_key), (dst_sec, dst_key))

  # Old artifact-cache option name -> new name under the 'cache' subsystem scope.
  artifact_cache_options_map = {
    'read_from_artifact_cache': 'read',
    'write_to_artifact_cache': 'write',
    'overwrite_cache_artifacts': 'overwrite',
    'read_artifact_caches': 'read_from',
    'write_artifact_caches': 'write_to',
    'cache_compression': 'compression_level',
  }
  check_task_subsystem_options('cache', artifact_cache_options_map)

  # Old per-task jvm option name -> new name under the 'jvm' subsystem scope.
  jvm_options_map = {
    'jvm_options': 'options',
    'args': 'program_args',
    'debug': 'debug',
    'debug_port': 'debug_port',
    'debug_args': 'debug_args',
  }
  jvm_options_sections = [
    'repl.scala', 'test.junit', 'run.jvm', 'bench', 'doc.javadoc', 'doc.scaladoc'
  ]
  check_task_subsystem_options('jvm', jvm_options_map, sections=jvm_options_sections)

  # Check that all values are parseable.
  for sec in ['DEFAULT'] + cp.sections():
    for key, value in cp.items(sec):
      value = value.strip()
      if value.startswith('['):
        try:
          custom_types.list_type(value)
        except ParseError:
          print('Value of {key} in section {section} is not a valid '
                'JSON list.'.format(key=green(key), section=section(sec)))
      elif value.startswith('{'):
        try:
          custom_types.dict_type(value)
        except ParseError:
          print('Value of {key} in section {section} is not a valid '
                'JSON object.'.format(key=green(key), section=section(sec)))
def setUp(self):
  """Writes a throwaway ini exercising interpolation, typed values and per-section overrides,
  then loads it as self.config for the tests below."""
  with temporary_file() as ini:
    ini.write(
'''
[DEFAULT]
answer: 42
scale: 1.2
path: /a/b/%(answer)s
embed: %(path)s::foo
disclaimer: Let it be known that.

[a]
fast: True
list: [1, 2, 3, %(answer)s]

[b]
preempt: False
dict: { 'a': 1, 'b': %(answer)s, 'c': ['%(answer)s', %(answer)s] }
''')
    # Close (flushing) inside the with-block so Config.load can read the contents before the
    # context manager deletes the file.
    ini.close()
    self.config = Config.load(configpath=ini.name)
def select_binary(base_path, version, name, config=None):
  """Returns the local path of a binary for the current os and architecture, fetching it first
  if it is not already bootstrapped under the configured 'pants_bootstrapdir'.

  :raises: :class:`pants.binary_util.BinaryUtil.BinaryNotFound` if no binary of the given version
    and name could be found.
  """
  # TODO(John Sirois): finish doc of the path structure expexcted under base_path
  config = config or Config.load()
  binary_path = select_binary_base_path(base_path, version, name)
  bootstrapped_binary_path = os.path.join(config.getdefault('pants_bootstrapdir'), binary_path)
  if not os.path.exists(bootstrapped_binary_path):
    # Fetch to a '~'-suffixed scratch name first so a partial download never looks bootstrapped.
    tmp_download_path = bootstrapped_binary_path + '~'
    try:
      with select_binary_stream(base_path, version, name, config) as stream:
        with safe_open(tmp_download_path, 'wb') as bootstrapped_binary:
          bootstrapped_binary.write(stream())
        os.rename(tmp_download_path, bootstrapped_binary_path)
        chmod_plus_x(bootstrapped_binary_path)
    finally:
      safe_delete(tmp_download_path)
  log.debug('Selected {binary} binary bootstrapped to: {path}'
            .format(binary=name, path=bootstrapped_binary_path))
  return bootstrapped_binary_path
def setup(self): options_bootstrapper = OptionsBootstrapper() # Force config into the cache so we (and plugin/backend loading code) can use it. # TODO: Plumb options in explicitly. options_bootstrapper.get_bootstrap_options() self.config = Config.from_cache() # Add any extra paths to python path (eg for loading extra source backends) extra_paths = self.config.getlist('backends', 'python-path', []) if extra_paths: sys.path.extend(extra_paths) # Load plugins and backends. backend_packages = self.config.getlist('backends', 'packages', []) plugins = self.config.getlist('backends', 'plugins', []) build_configuration = load_plugins_and_backends(plugins, backend_packages) # Now that plugins and backends are loaded, we can gather the known scopes. self.targets = [] known_scopes = [''] for goal in Goal.all(): # Note that enclosing scopes will appear before scopes they enclose. known_scopes.extend(filter(None, goal.known_scopes())) # Now that we have the known scopes we can get the full options. self.options = options_bootstrapper.get_full_options(known_scopes=known_scopes) self.register_options() self.run_tracker = RunTracker.from_config(self.config) report = initial_reporting(self.config, self.run_tracker) self.run_tracker.start(report) url = self.run_tracker.run_info.get_info('report_url') if url: self.run_tracker.log(Report.INFO, 'See a report at: %s' % url) else: self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)') self.build_file_parser = BuildFileParser(build_configuration=build_configuration, root_dir=self.root_dir, run_tracker=self.run_tracker) self.address_mapper = BuildFileAddressMapper(self.build_file_parser) self.build_graph = BuildGraph(run_tracker=self.run_tracker, address_mapper=self.address_mapper) with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]): # construct base parameters to be filled in for BuildGraph for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]): build_file = 
BuildFile.from_cache(root_dir=self.root_dir, relpath=path) # TODO(pl): This is an unfortunate interface leak, but I don't think # in the long run that we should be relying on "bootstrap" BUILD files # that do nothing except modify global state. That type of behavior # (e.g. source roots, goal registration) should instead happen in # project plugins, or specialized configuration files. self.build_file_parser.parse_build_file_family(build_file) # Now that we've parsed the bootstrap BUILD files, and know about the SCM system. self.run_tracker.run_info.add_scm_info() self._expand_goals_and_specs()
def __init__(self, target, root_dir, extra_targets=None, extra_requirements=None,
             builder=None, platforms=None, interpreter=None, conn_timeout=None):
  """Prepares a chroot for building `target` into a PEX.

  :param target: The primary python target to build.
  :param root_dir: Root directory of the workspace.
  :param extra_targets: Optional additional targets to include; copied defensively.
  :param extra_requirements: Optional additional requirements to include; copied defensively.
  :param builder: Optional PEXBuilder; defaults to a fresh one over a temp dir.
  :param platforms: Optional target platforms.
  :param interpreter: Optional PythonInterpreter; defaults to the current one.
  :param conn_timeout: Connection timeout; unused here — TODO confirm whether still needed.
  """
  self._config = Config.load()
  self._target = target
  self._root = root_dir
  self._platforms = platforms
  self._interpreter = interpreter or PythonInterpreter.get()
  # Copy so later mutation by the caller can't affect us.
  self._extra_targets = list(
      extra_targets) if extra_targets is not None else []
  self._extra_requirements = list(
      extra_requirements) if extra_requirements is not None else []
  self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

  # Note: unrelated to the general pants artifact cache.
  # Eggs are cached per interpreter identity under the python-setup scratch dir.
  self._egg_cache_root = os.path.join(
      PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts'),
      str(self._interpreter.identity))

  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def resolve(cls, config_file):
  """Parse a keystore config file and return a dict mapping key names to Keystore objects.

  :param config_file: Path (may contain '~') to an ini-style file with one section per keystore.
    Every section must define build_type, keystore_location, keystore_alias, keystore_password
    and key_password.
  :returns: Dict of section name -> Keystore.
  :raises KeystoreResolver.Error: if the file cannot be read or a section is missing a required
    option.
  """
  config_file = os.path.expanduser(config_file)
  config = Config.create_parser()
  try:
    with open(config_file, 'rb') as keystore_config:
      config.readfp(keystore_config)
  except IOError as e:
    raise KeystoreResolver.Error('Problem parsing config at {}: {}'.format(config_file, e))
  parser = SingleFileConfig(config_file, config)

  def create_key(key_name):
    """Instantiate a Keystore from the required options of one config section."""
    return Keystore(keystore_name=key_name,
                    build_type=parser.get_required(key_name, 'build_type'),
                    keystore_location=parser.get_required(key_name, 'keystore_location'),
                    keystore_alias=parser.get_required(key_name, 'keystore_alias'),
                    keystore_password=parser.get_required(key_name, 'keystore_password'),
                    key_password=parser.get_required(key_name, 'key_password'))

  keys = {}
  for name in config.sections():
    try:
      keys[name] = create_key(name)
    except Config.ConfigError as e:
      # Re-wrap so callers only need to catch KeystoreResolver.Error.
      raise KeystoreResolver.Error(e)
  return keys
def __init__(self, target, run_tracker, interpreter=None, conn_timeout=None):
  """Prepares a chroot for building `target` into a PEX under the configured distdir.

  :param target: Must be a PythonBinary target.
  :param run_tracker: Supplies run info stamped into the PEX's build properties.
  :param interpreter: Optional PythonInterpreter; defaults to the current one.
  :param conn_timeout: Connection timeout forwarded to the PythonChroot.
  :raises PythonBinaryBuilder.NotABinaryTargetException: if target is not a PythonBinary.
  """
  self.target = target
  self.interpreter = interpreter or PythonInterpreter.get()
  if not isinstance(target, PythonBinary):
    raise PythonBinaryBuilder.NotABinaryTargetException(
        "Target %s is not a PythonBinary!" % target)

  config = Config.load()
  self.distdir = config.getdefault('pants_distdir')
  # mktemp gives a unique (not yet created) path for the PEXBuilder to populate.
  distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)

  # Stamp basic run info and scm state into the PEX's build properties.
  run_info = run_tracker.run_info
  build_properties = {}
  build_properties.update(
      run_info.add_basic_info(run_id=None, timestamp=time.time()))
  build_properties.update(run_info.add_scm_info())

  pexinfo = target.pexinfo.copy()
  pexinfo.build_properties = build_properties
  builder = PEXBuilder(distpath, pex_info=pexinfo, interpreter=self.interpreter)

  self.chroot = PythonChroot(targets=[target],
                             builder=builder,
                             platforms=target.platforms,
                             interpreter=self.interpreter,
                             conn_timeout=conn_timeout)
def __init__(self, target, root_dir, extra_targets=None, extra_requirements=None, builder=None,
             platforms=None, interpreter=None, conn_timeout=None):
  """Prepares a chroot for building `target` into a PEX.

  :param target: The primary python target to build.
  :param root_dir: Root directory of the workspace.
  :param extra_targets: Optional additional targets to include; copied defensively.
  :param extra_requirements: Optional additional requirements to include; copied defensively.
  :param builder: Optional PEXBuilder; defaults to a fresh one over a temp dir.
  :param platforms: Optional target platforms.
  :param interpreter: Optional PythonInterpreter; defaults to the current one.
  :param conn_timeout: Unused here; kept for interface compatibility.
  """
  self._config = Config.load()
  self._target = target
  self._root = root_dir
  self._platforms = platforms
  self._interpreter = interpreter or PythonInterpreter.get()
  # Defensive copies so caller-side mutation can't affect us.
  self._extra_targets = [] if extra_targets is None else list(extra_targets)
  self._extra_requirements = [] if extra_requirements is None else list(extra_requirements)
  self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

  # Note: unrelated to the general pants artifact cache.
  scratch = PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts')
  self._egg_cache_root = os.path.join(scratch, str(self._interpreter.identity))

  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def __init__(self):
  """Creates an ivy bootstrapper, reading the bootstrap jar url, fetch timeout and ivy profile
  from config with class-level defaults as fallbacks."""
  config = Config.load()
  self._config = config
  self._bootstrap_jar_url = config.get('ivy', 'bootstrap_jar_url', default=self._DEFAULT_URL)
  self._timeout_secs = config.getint('ivy', 'bootstrap_fetch_timeout_secs', default=1)
  self._version_or_ivyxml = config.get('ivy', 'ivy_profile', default=self._DEFAULT_VERSION)
  # Populated lazily once the bootstrap jar has been fetched.
  self._classpath = None
def setUp(self):
  """Writes a throwaway ini exercising interpolation, typed values and per-section overrides,
  then loads it as self.config for the tests below."""
  with temporary_file() as ini:
    ini.write('''
[DEFAULT]
answer: 42
scale: 1.2
path: /a/b/%(answer)s
embed: %(path)s::foo
disclaimer: Let it be known that.

[a]
fast: True
list: [1, 2, 3, %(answer)s]

[b]
preempt: False
dict: { 'a': 1, 'b': %(answer)s, 'c': ['%(answer)s', %(answer)s] }
''')
    # Close (flushing) inside the with-block so Config.load can read the contents before the
    # context manager deletes the file.
    ini.close()
    self.config = Config.load(configpath=ini.name)
def __init__(self, target, run_tracker, interpreter=None):
  """Prepares a chroot for building `target` into a PEX under the configured distdir.

  :param target: Must be a PythonBinary target.
  :param run_tracker: Supplies run info stamped into the PEX's build properties.
  :param interpreter: Optional PythonInterpreter; defaults to the current one.
  :raises PythonBinaryBuilder.NotABinaryTargetException: if target is not a PythonBinary.
  """
  self.target = target
  self.interpreter = interpreter or PythonInterpreter.get()
  if not isinstance(target, PythonBinary):
    raise PythonBinaryBuilder.NotABinaryTargetException(
        "Target %s is not a PythonBinary!" % target)

  config = Config.from_cache()
  self.distdir = config.getdefault('pants_distdir')
  dist_path = tempfile.mktemp(dir=self.distdir, prefix=target.name)

  # Stamp basic run info and scm state into the PEX's build properties.
  run_info = run_tracker.run_info
  build_properties = {}
  build_properties.update(run_info.add_basic_info(run_id=None, timestamp=time.time()))
  build_properties.update(run_info.add_scm_info())

  pexinfo = target.pexinfo.copy()
  pexinfo.build_properties = build_properties
  pex_builder = PEXBuilder(dist_path, pex_info=pexinfo, interpreter=self.interpreter)

  self.chroot = PythonChroot(
      targets=[target],
      builder=pex_builder,
      platforms=target.platforms,
      interpreter=self.interpreter)
def test_depmap_jar_path(self):
  """Integration check that depmap project info resolves a 3rdparty jar to its ivy cache path."""
  with temporary_dir(root_dir=self.workdir_root()) as workdir:
    test_target = 'examples/tests/java/com/pants/examples/usethrift:usethrift'
    json_data = self.run_depmap_project_info(test_target, workdir)
    # Hack because Bootstrapper.instance() reads config from cache. Will go away after we plumb
    # options into IvyUtil properly.
    Config.cache(Config.load())
    ivy_cache_dir = Bootstrapper.instance().ivy_cache_dir
    self.assertEquals(
        json_data.get('libraries').get('commons-lang:commons-lang:2.5'),
        [os.path.join(ivy_cache_dir, 'commons-lang/commons-lang/jars/commons-lang-2.5.jar')])
def setUp(self):
  """Creates a fresh temporary build root with an empty pants.ini and default build machinery."""
  self.build_root = mkdtemp(suffix='_BUILD_ROOT')
  # Point the global BuildRoot singleton at the temp dir before anything reads it.
  BuildRoot().path = self.build_root
  # An empty config file is enough for Config.load below.
  self.create_file('pants.ini')
  self.build_file_parser = make_default_build_file_parser(self.build_root)
  self.build_graph = BuildGraph()
  self.config = Config.load()
def __init__(self):
  """Creates an ivy bootstrapper.

  Reads the bootstrap jar url, fetch timeout and ivy profile from the cached config, falling
  back to class-level defaults.
  """
  self._config = Config.from_cache()
  self._bootstrap_jar_url = self._config.get('ivy', 'bootstrap_jar_url',
                                             default=self._DEFAULT_URL)
  # NOTE(review): a 1-second default fetch timeout looks aggressively low for a jar download —
  # confirm intended.
  self._timeout_secs = self._config.getint('ivy', 'bootstrap_fetch_timeout_secs', default=1)
  self._version_or_ivyxml = self._config.get('ivy', 'ivy_profile', default=self._DEFAULT_VERSION)
  # Populated lazily once the bootstrap jar has been fetched.
  self._classpath = None
def __init__(self, run_tracker, root_dir, parser, args, build_file_parser, address_mapper,
             build_graph, needs_old_options=True):
  """Sets up a command: parses bootstrap BUILD files, patches the parser's error handling,
  registers options and parses the subcommand arguments.

  :param run_tracker: The (already opened) RunTracker to track this run with.
  :param root_dir: The root directory of the pants workspace.
  :param parser: An OptionParser for the subcommand's flags.
  :param args: The subcommand arguments to parse.
  :param build_file_parser: Parses BUILD files.
  :param address_mapper: Maps specs to BUILD file addresses.
  :param build_graph: Graph the parsed targets are injected into.
  :param needs_old_options: When False, old_options/args are set to None so any stale usage
    fails predictably.
  """
  self.run_tracker = run_tracker
  self.root_dir = root_dir
  self.build_file_parser = build_file_parser
  self.address_mapper = address_mapper
  self.build_graph = build_graph

  config = Config.from_cache()

  with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
    # construct base parameters to be filled in for BuildGraph
    for path in config.getlist('goals', 'bootstrap_buildfiles', default=[]):
      build_file = BuildFile.from_cache(root_dir=self.root_dir, relpath=path)
      # TODO(pl): This is an unfortunate interface leak, but I don't think
      # in the long run that we should be relying on "bootstrap" BUILD files
      # that do nothing except modify global state. That type of behavior
      # (e.g. source roots, goal registration) should instead happen in
      # project plugins, or specialized configuration files.
      self.build_file_parser.parse_build_file_family(build_file)

  # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
  self.run_tracker.run_info.add_scm_info()

  # Override the OptionParser's error with more useful output
  def error(message=None, show_help=True):
    if message:
      print(message + '\n')
    if show_help:
      parser.print_help()
    parser.exit(status=1)
  parser.error = error
  self.error = error

  self.register_options()
  self.setup_parser(parser, args)
  if needs_old_options:
    self.old_options, self.args = parser.parse_args(args)
  else:
    # Ensure a predictable error if anything under goal tries to use these.
    self.old_options = None
    self.args = None
def create_context(config='', options=None, target_roots=None, **kwargs):
  """Builds a test ``Context``, empty of config values, options and targets unless supplied.

  :param config: Either a ``Config`` object, or a string holding pants.ini contents to parse
    into one.
  :param options: An optional dict of scope -> (dict of name -> new-style option values).
  :param target_roots: An optional list of target roots to seed the context target graph from.
  :param ``**kwargs``: Any additional keyword arguments to pass through to the Context
    constructor.
  """
  if not isinstance(config, Config):
    config = create_config(config)
  # TODO: Get rid of this temporary hack after we plumb options through everywhere and can get
  # rid of the config cache.
  Config.cache(config)
  run_tracker = create_run_tracker()
  roots = maybe_list(target_roots, Target) if target_roots else []
  return Context(config, create_options(options or {}), run_tracker, roots, **kwargs)
def generate_coverage_config(target):
  """Builds a coverage.py config for `target`, pointing html output at a per-target directory
  under the configured distdir (created if absent)."""
  parser = configparser.ConfigParser()
  parser.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
  parser.add_section('html')
  rel_dir = os.path.dirname(target.address.buildfile.relpath)
  target_dir = os.path.join(Config.load().getdefault('pants_distdir'), 'coverage',
                            rel_dir, target.name)
  safe_mkdir(target_dir)
  parser.set('html', 'directory', target_dir)
  return parser
def setUp(self):
  """Writes a throwaway ini declaring python-setup platforms, then loads it as self.config."""
  with temporary_file() as ini:
    ini.write('''
[python-setup]
platforms: [ 'current', 'linux-x86_64']
''')
    # Close (flushing) inside the with-block so Config.load can read the contents before the
    # context manager deletes the file.
    ini.close()
    self.config = Config.load(configpath=ini.name)
def __init__(self):
  """Creates an ivy bootstrapper.

  Reads the bootstrap jar url, fetch timeout and ivy profile from the cached config, falling
  back to class-level defaults.
  """
  self._config = Config.from_cache()
  self._bootstrap_jar_url = self._config.get('ivy', 'bootstrap_jar_url',
                                             default=self._DEFAULT_URL)
  # NOTE(review): the 1-second default fetch timeout looks aggressively low — confirm intended.
  self._timeout_secs = self._config.getint(
      'ivy', 'bootstrap_fetch_timeout_secs', default=1)
  self._version_or_ivyxml = self._config.get(
      'ivy', 'ivy_profile', default=self._DEFAULT_VERSION)
  # Populated lazily once the bootstrap jar has been fetched.
  self._classpath = None
def create_config(sample_ini=''):
  """Builds a ``SingleFileConfig`` by parsing the supplied ini text.

  :param string sample_ini: The contents of the ini file containing the config values.
  :raises ValueError: if sample_ini is not a string.
  """
  if not isinstance(sample_ini, Compatibility.string):
    raise ValueError('The sample_ini supplied must be a string, given: %s' % sample_ini)

  ini_parser = Config.create_parser()
  ini_bytes = io.BytesIO(sample_ini.encode('utf-8'))
  with ini_bytes:
    ini_parser.readfp(ini_bytes)
  return SingleFileConfig('dummy/path', ini_parser)