def _do_test(self, expected_val, s):
    """Parse `s` with the custom-type parser matching `expected_val`'s type and assert equality.

    :param expected_val: The expected parsed value; its type selects the parser under
      test (dict -> `dict_option`, list/tuple -> `list_option`).
    :param s: The raw option string to parse.
    :raises Exception: If `expected_val` is neither a dict nor a list/tuple.
    """
    if isinstance(expected_val, dict):
        val = dict_option(s).val
    elif isinstance(expected_val, (list, tuple)):
        val = list_option(s).val
    else:
        raise Exception('Expected value {0} is of unsupported type: {1}'.format(
            expected_val, type(expected_val)))
    # assertEquals is a deprecated alias of assertEqual; use the supported name.
    self.assertEqual(expected_val, val)
def get_config_file_paths(env, args):
    """Return the config file locations given by --pants-config-files.

    Config must be loaded before regular option processing can run, yet the config
    location is itself an option — this helper breaks that chicken-and-egg cycle by
    special-casing --pants-config-files from the raw args/env. Consequently, setting
    the config file location *inside* a config file has no effect.

    :param env: Mapping of environment variables.
    :param args: Iterable of raw command-line argument strings.
    :returns: List of config file paths.
    """
    # Mirror the env-var/flag precedence that Option applies to regular options.
    # --pants-config-files is also parsed later as a regular option (harmless, and
    # lets other code inspect where config was read from).
    flag = '--pants-config-files='
    evars = ['PANTS_DEFAULT_PANTS_CONFIG_FILES', 'PANTS_PANTS_CONFIG_FILES', 'PANTS_CONFIG_FILES']

    # Slightly imprecise: scope is not checked. But it's very unlikely any task or
    # subsystem defines its own --pants-config-files.
    # TODO: Enforce a ban on options with a --pants- prefix outside our global options?
    raw = next((arg[len(flag):] for arg in args if arg.startswith(flag)), None)
    if not raw:
        raw = next((env[var] for var in evars if var in env), None)
    if raw:
        return list_option(raw)
    return [get_default_pants_config_file()]
def parse_typed_list(val_str):
    """Parse `val_str` into a list of `value_type` values, or None if it is unset.

    Expands the raw string first, then parses it as a list option and converts
    each member via the enclosing scope's `value_type`.
    """
    if val_str is None:
        return None
    return [value_type(member) for member in list_option(expand(val_str))]
def check_config_file(path):
    """Report unmigrated option keys and unparseable JSON-ish values in the config at `path`."""
    cp = Config._create_parser()
    with open(path, 'r') as ini:
        cp.readfp(ini)
    print('Checking config file at {} for unmigrated keys.'.format(path), file=sys.stderr)

    def colored_section(name):
        # Render a section header the way it appears in the ini file, colorized.
        return cyan('[{}]'.format(name))

    # Flag every directly-migrated option.
    for src, dst in migrations.items():
        check_option(cp, src, dst)

    def sweep_task_subsystem(subsystem_sec, options_map, sections=None):
        # Per-task subsystem options may live in DEFAULT or in any task section; check
        # each (section, key) pair against its new subsystem-scoped location.
        for src_sec in ['DEFAULT'] + (sections or cp.sections()):
            if src_sec == 'DEFAULT':
                dst_sec = subsystem_sec
            else:
                dst_sec = '{}.{}'.format(subsystem_sec, src_sec)
            for src_key, dst_key in options_map.items():
                check_option(cp, (src_sec, src_key), (dst_sec, dst_key))

    # Artifact-cache options moved under the 'cache' subsystem.
    sweep_task_subsystem('cache', {
        'read_from_artifact_cache': 'read',
        'write_to_artifact_cache': 'write',
        'overwrite_cache_artifacts': 'overwrite',
        'read_artifact_caches': 'read_from',
        'write_artifact_caches': 'write_to',
        'cache_compression': 'compression_level',
    })

    # JVM options moved under the 'jvm' subsystem, but only for these task sections.
    sweep_task_subsystem('jvm', {
        'jvm_options': 'options',
        'args': 'program_args',
        'debug': 'debug',
        'debug_port': 'debug_port',
        'debug_args': 'debug_args',
    }, sections=['repl.scala', 'test.junit', 'run.jvm', 'bench', 'doc.javadoc', 'doc.scaladoc'])

    # Values that look like JSON collections must actually parse as such.
    for sec in ['DEFAULT'] + cp.sections():
        for key, raw_value in cp.items(sec):
            stripped = raw_value.strip()
            if stripped.startswith('['):
                try:
                    custom_types.list_option(stripped)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON list.'.format(key=green(key), section=colored_section(sec)))
            elif stripped.startswith('{'):
                try:
                    custom_types.dict_option(stripped)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON object.'.format(key=green(key), section=colored_section(sec)))
def check_config_file(path):
    """Report unmigrated option keys and malformed JSON-ish values in the config at `path`."""
    cp = Config._create_parser()
    with open(path, 'r') as ini:
        # NOTE(review): readfp is deprecated in Python 3 (read_file) — presumably
        # kept for Python 2 compatibility; confirm before changing.
        cp.readfp(ini)
    print('Checking config file at {} for unmigrated keys.'.format(path), file=sys.stderr)

    def section(s):
        # Colorize a section name as it appears in the ini file.
        return cyan('[{}]'.format(s))

    # Flag every directly-migrated option.
    for src, dst in migrations.items():
        check_option(cp, src, dst)

    # Special-case handling of per-task subsystem options, so we can sweep them up in all
    # sections easily.
    def check_task_subsystem_options(subsystem_sec, options_map, sections=None):
        sections = sections or cp.sections()
        for src_sec in ['DEFAULT'] + sections:
            # DEFAULT maps to the bare subsystem section; task sections map to
            # a task-scoped subsystem section.
            dst_sec = subsystem_sec if src_sec == 'DEFAULT' else '{}.{}'.format(subsystem_sec, src_sec)
            for src_key, dst_key in options_map.items():
                check_option(cp, (src_sec, src_key), (dst_sec, dst_key))

    # Artifact-cache options moved under the 'cache' subsystem.
    artifact_cache_options_map = {
        'read_from_artifact_cache': 'read',
        'write_to_artifact_cache': 'write',
        'overwrite_cache_artifacts': 'overwrite',
        'read_artifact_caches': 'read_from',
        'write_artifact_caches': 'write_to',
        'cache_compression': 'compression_level',
    }
    check_task_subsystem_options('cache', artifact_cache_options_map)

    # JVM options moved under the 'jvm' subsystem, for these task sections only.
    jvm_options_map = {
        'jvm_options': 'options',
        'args': 'program_args',
        'debug': 'debug',
        'debug_port': 'debug_port',
        'debug_args': 'debug_args',
    }
    jvm_options_sections = [
        'repl.scala', 'test.junit', 'run.jvm', 'bench', 'doc.javadoc', 'doc.scaladoc'
    ]
    check_task_subsystem_options('jvm', jvm_options_map, sections=jvm_options_sections)

    # Check that all values are parseable.
    for sec in ['DEFAULT'] + cp.sections():
        for key, value in cp.items(sec):
            value = value.strip()
            if value.startswith('['):
                try:
                    custom_types.list_option(value)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON list.'.format(key=green(key), section=section(sec)))
            elif value.startswith('{'):
                try:
                    custom_types.dict_option(value)
                except ParseError:
                    print('Value of {key} in section {section} is not a valid '
                          'JSON object.'.format(key=green(key), section=section(sec)))
def check_config_file(path):
    """Report unmigrated option keys and malformed JSON-ish values in the config at `path`."""
    # NOTE(review): the other variants in this file call Config._create_parser();
    # this one calls Config.create_parser() — confirm which API this version targets.
    cp = Config.create_parser()
    with open(path, "r") as ini:
        # NOTE(review): readfp is deprecated in Python 3 (read_file) — presumably
        # kept for Python 2 compatibility; confirm before changing.
        cp.readfp(ini)
    print("Checking config file at {0} for unmigrated keys.".format(path), file=sys.stderr)

    def section(s):
        # Colorize a section name as it appears in the ini file.
        return cyan("[{0}]".format(s))

    # Flag every directly-migrated option.
    for src, dst in migrations.items():
        check_option(cp, src, dst)

    # Special-case handling of per-task subsystem options, so we can sweep them up in all
    # sections easily.
    def check_task_subsystem_options(subsystem_sec, options_map, sections=None):
        sections = sections or cp.sections()
        for src_sec in ["DEFAULT"] + sections:
            # DEFAULT maps to the bare subsystem section; task sections map to
            # a task-scoped subsystem section.
            dst_sec = subsystem_sec if src_sec == "DEFAULT" else "{}.{}".format(subsystem_sec, src_sec)
            for src_key, dst_key in options_map.items():
                check_option(cp, (src_sec, src_key), (dst_sec, dst_key))

    # Artifact-cache options moved under the 'cache' subsystem.
    artifact_cache_options_map = {
        "read_from_artifact_cache": "read",
        "write_to_artifact_cache": "write",
        "overwrite_cache_artifacts": "overwrite",
        "read_artifact_caches": "read_from",
        "write_artifact_caches": "write_to",
        "cache_compression": "compression_level",
    }
    check_task_subsystem_options("cache", artifact_cache_options_map)

    # JVM options moved under the 'jvm' subsystem, for these task sections only.
    jvm_options_map = {
        "jvm_options": "options",
        "args": "program_args",
        "debug": "debug",
        "debug_port": "debug_port",
        "debug_args": "debug_args",
    }
    jvm_options_sections = ["repl.scala", "test.junit", "run.jvm", "bench", "doc.javadoc", "doc.scaladoc"]
    check_task_subsystem_options("jvm", jvm_options_map, sections=jvm_options_sections)

    # Check that all values are parseable.
    for sec in ["DEFAULT"] + cp.sections():
        for key, value in cp.items(sec):
            value = value.strip()
            if value.startswith("["):
                try:
                    custom_types.list_option(value)
                except ParseError:
                    print(
                        "Value of {key} in section {section} is not a valid "
                        "JSON list.".format(key=green(key), section=section(sec))
                    )
            elif value.startswith("{"):
                try:
                    custom_types.dict_option(value)
                except ParseError:
                    print(
                        "Value of {key} in section {section} is not a valid "
                        "JSON object.".format(key=green(key), section=section(sec))
                    )