def _read_tree_config(self):
    """Read the in-tree config file (if configured) and lock self.tree_config.

    Merges self.config['in_tree_config'] (relative to the test install dir)
    into self.tree_config, dumps the merged config to the log dir, and on
    try branches loads the known try arguments for the harness.
    """
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get('abs_test_install_dir',
                                os.path.join(dirs['abs_work_dir'], 'tests'))
    if 'in_tree_config' in self.config:
        rel_tree_config_path = self.config['in_tree_config']
        tree_config_path = os.path.join(test_install_dir, rel_tree_config_path)
        if not os.path.isfile(tree_config_path):
            # Fix: leading space added before "It must" so the two adjacent
            # string literals don't run together in the fatal message.
            self.fatal("The in-tree configuration file '%s' does not exist!"
                       " It must be added to '%s'. See bug 1035551 for more details." %
                       (tree_config_path,
                        os.path.join('gecko', 'testing', rel_tree_config_path)))
        try:
            self.tree_config.update(parse_config_file(tree_config_path))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # aren't swallowed; parse errors are still reported as FATAL.
            msg = "There was a problem parsing the in-tree configuration file '%s'!" % \
                  os.path.join('gecko', 'testing', rel_tree_config_path)
            self.exception(message=msg, level=FATAL)
        self.dump_config(file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
                        config=self.tree_config)
    if (self.buildbot_config and 'properties' in self.buildbot_config and
            self.buildbot_config['properties'].get('branch') == 'try'):
        # On try, expose the known try arguments to the harness.
        try_config_path = os.path.join(test_install_dir, 'config',
                                       'mozharness', 'try_arguments.py')
        known_try_arguments = parse_config_file(try_config_path)
        self.set_extra_try_arguments(known_try_arguments)
    self.tree_config.lock()
def test_dump_config_hierarchy_matches_self_config(self):
    """The per-file configs dumped by --dump-config-hierarchy, combined,
    should equal the fully-merged self.config.

    Constructing BaseScript with 'dump_config_hierarchy' set ends in
    SystemExit, so the except branch below is the actual assertion path.
    """
    try:
        ######
        # we need temp_cfg because self.s will be gcollected (NoneType) by
        # the time we get to SystemExit exception
        # temp_cfg will differ from self.s.config because of
        # 'dump_config_hierarchy'. we have to make a deepcopy because
        # config is a locked dict
        temp_s = script.BaseScript(
            initial_config_file='test/test.json',
            option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
        )
        from copy import deepcopy
        temp_cfg = deepcopy(temp_s.config)
        temp_cfg.update({'dump_config_hierarchy': True})
        ######
        self.s = script.BaseScript(
            initial_config_file='test/test.json',
            option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
            config={'dump_config_hierarchy': True}
        )
    except SystemExit:
        local_cfg_files = parse_config_file('test_logs/localconfigfiles.json')
        # finally let's just make sure that all the items added up, equals
        # what we started with: self.config
        target_cfg = {}
        for cfg_file in local_cfg_files:
            target_cfg.update(local_cfg_files[cfg_file])
        self.assertEqual(
            target_cfg, temp_cfg,
            msg="all of the items (combined) in each cfg file dumped via "
                "--dump-config-hierarchy does not equal self.config "
        )
def test_dump_config_equals_self_config(self):
    """The flat config dumped by --dump-config should equal self.config.

    Constructing BaseScript with 'dump_config' set ends in SystemExit,
    so the except branch below is the actual assertion path.
    """
    try:
        ######
        # we need temp_cfg because self.s will be gcollected (NoneType) by
        # the time we get to SystemExit exception
        # temp_cfg will differ from self.s.config because of
        # 'dump_config_hierarchy'. we have to make a deepcopy because
        # config is a locked dict
        temp_s = script.BaseScript(
            initial_config_file='test/test.json',
            option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
        )
        from copy import deepcopy
        temp_cfg = deepcopy(temp_s.config)
        temp_cfg.update({'dump_config': True})
        ######
        self.s = script.BaseScript(
            initial_config_file='test/test.json',
            option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
            config={'dump_config': True}
        )
    except SystemExit:
        target_cfg = parse_config_file('test_logs/localconfig.json')
        self.assertEqual(
            target_cfg, temp_cfg,
            msg="all of the items (combined) in each cfg file dumped via "
                "--dump-config does not equal self.config "
        )
def query_release_config(self):
    """Build, cache and return the release config.

    Values come either from the file named by self.config
    ['release_config_file'] or, failing that, straight from self.config.
    Missing/invalid release config files are fatal.
    """
    if self.release_config:
        # Already populated on a previous call.
        return self.release_config
    c = self.config
    dirs = self.query_abs_dirs()
    release_config_file = c.get("release_config_file")
    if release_config_file:
        self.info("Getting release config from %s..." % c["release_config_file"])
        rc = None
        try:
            rc = parse_config_file(
                os.path.join(dirs['abs_work_dir'], c["release_config_file"]),
                config_dict_name="releaseConfig"
            )
        except IOError:
            self.fatal("Release config file %s not found!" % c["release_config_file"])
        except RuntimeError:
            self.fatal("Invalid release config file %s!" % c["release_config_file"])
        # ftp credentials may be overridden from self.config; everything
        # else comes from the releaseConfig dict.
        self.release_config.update({
            'version': rc['version'],
            'buildnum': rc['buildNumber'],
            'ftp_server': rc['stagingServer'],
            'ftp_user': c.get('ftp_user', rc['hgUsername']),
            'ftp_ssh_key': c.get('ftp_ssh_key', rc['hgSshKey']),
            'release_channel': rc['releaseChannel'],
        })
    else:
        self.info("No release config file; using default config.")
        for key in ('version', 'buildnum', 'ftp_server',
                    'ftp_user', 'ftp_ssh_key'):
            self.release_config[key] = c[key]
    self.info("Release config:\n%s" % self.release_config)
    return self.release_config
def parse_locales_file(self, locales_file):
    """Parse a locales file and return the locale list (or None if empty).

    JSON files may carry per-locale 'platforms' (filtered against
    self.config['locales_platform']) and 'revision' (recorded in
    self.locale_dict); plain-text files are whitespace-separated locale
    codes.  Locales in self.config['ignore_locales'] are dropped.  The
    result is also cached on self.locales.  Returns None when no locale
    survives filtering (preserves the original implicit-None contract).
    """
    locales = []
    c = self.config
    platform = c.get("locales_platform", None)
    ignore_locales = c.get("ignore_locales", None)
    if locales_file.endswith('json'):
        locales_json = parse_config_file(locales_file)
        self.locale_dict = {}
        for locale in locales_json.keys():
            # Skip locales not built for this platform.
            if platform and platform not in locales_json[locale]['platforms']:
                continue
            locales.append(locale)
            self.locale_dict[locale] = locales_json[locale]['revision']
    else:
        # Fix: use a context manager so the file handle is closed even if
        # read() raises (was a bare open()/close() pair).
        with open(locales_file) as fh:
            locales = fh.read().split()
    if ignore_locales:
        for locale in ignore_locales:
            if locale in locales:
                self.debug("Ignoring locale %s." % locale)
                locales.remove(locale)
    if locales:
        self.locales = locales
        return self.locales
def _read_tree_config(self):
    """Read the in-tree config file (if configured) and lock self.tree_config.

    Merges self.config['in_tree_config'] (relative to the test install dir)
    into self.tree_config and dumps the merged config to the log dir.
    """
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get('abs_test_install_dir',
                                os.path.join(dirs['abs_work_dir'], 'tests'))
    if 'in_tree_config' in self.config:
        rel_tree_config_path = self.config['in_tree_config']
        tree_config_path = os.path.join(test_install_dir, rel_tree_config_path)
        if not os.path.isfile(tree_config_path):
            # Fix: leading space added before "It must" so the two adjacent
            # string literals don't run together in the fatal message.
            self.fatal("The in-tree configuration file '%s' does not exist!"
                       " It must be added to '%s'. See bug 981030 for more details." %
                       (tree_config_path,
                        os.path.join('gecko', 'testing', rel_tree_config_path)))
        try:
            self.tree_config.update(parse_config_file(tree_config_path))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # aren't swallowed; parse errors are still reported as FATAL.
            msg = "There was a problem parsing the in-tree configuration file '%s'!" % \
                  os.path.join('gecko', 'testing', rel_tree_config_path)
            self.exception(message=msg, level=FATAL)
        self.dump_config(file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
                        config=self.tree_config)
    self.tree_config.lock()
def _read_tree_config(self):
    """Read the in-tree config file (if configured) and lock self.tree_config.

    Merges self.config['in_tree_config'] (relative to the test install dir)
    into self.tree_config, dumps the merged config to the log dir, and on
    try branches parses try syntax from the push comments.
    """
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get(
        'abs_test_install_dir', os.path.join(dirs['abs_work_dir'], 'tests'))
    if 'in_tree_config' in self.config:
        rel_tree_config_path = self.config['in_tree_config']
        tree_config_path = os.path.join(test_install_dir, rel_tree_config_path)
        if not os.path.isfile(tree_config_path):
            # Fix: leading space added before "It must" so the two adjacent
            # string literals don't run together in the fatal message.
            self.fatal(
                "The in-tree configuration file '%s' does not exist!"
                " It must be added to '%s'. See bug 1035551 for more details." %
                (tree_config_path,
                 os.path.join('goanna', 'testing', rel_tree_config_path)))
        try:
            self.tree_config.update(parse_config_file(tree_config_path))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # aren't swallowed; parse errors are still reported as FATAL.
            msg = "There was a problem parsing the in-tree configuration file '%s'!" % \
                  os.path.join('goanna', 'testing', rel_tree_config_path)
            self.exception(message=msg, level=FATAL)
        self.dump_config(file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
                        config=self.tree_config)
    if (self.buildbot_config and 'properties' in self.buildbot_config and
            self.buildbot_config['properties'].get('branch') == 'try'):
        try_config_path = os.path.join(test_install_dir, 'config',
                                       'mozharness', 'try_arguments.py')
        known_try_arguments = parse_config_file(try_config_path)
        # Try syntax normally rides in the last change's commit comments.
        comments = self.buildbot_config['sourcestamp']['changes'][-1][
            'comments']
        if not comments and 'try_syntax' in self.buildbot_config[
                'properties']:
            # If we don't find try syntax in the usual place, check for it in an
            # alternate property available to tools using self-serve.
            comments = self.buildbot_config['properties']['try_syntax']
        self.parse_extra_try_arguments(comments, known_try_arguments)
    self.tree_config.lock()
def query_talos_json_config(self):
    """Parse talos.json on first use, cache it, and return the config."""
    if not self.talos_json_config:
        if not self.talos_json:
            # Default to the talos.json that ships in the talos checkout.
            self.talos_json = os.path.join(self.talos_path, 'talos.json')
        self.talos_json_config = parse_config_file(self.talos_json)
        self.info(pprint.pformat(self.talos_json_config))
    return self.talos_json_config
def read_buildbot_config(self):
    """Load the buildbot properties JSON file into self.buildbot_config."""
    json_path = self.config.get("buildbot_json_path")
    if json_path:
        # TODO try/except?
        self.buildbot_config = parse_config_file(json_path)
        self.info(pprint.pformat(self.buildbot_config))
    else:
        # If we need to fail out, add postflight_read_buildbot_config()
        self.info("buildbot_json_path is not set. Skipping...")
def read_buildbot_config(self):
    """Parse the buildbot properties JSON (if configured) and log it."""
    json_path = self.config.get("buildbot_json_path")
    if not json_path:
        # If we need to fail out, add postflight_read_buildbot_config()
        self.info("buildbot_json_path is not set. Skipping...")
        return
    # TODO try/except?
    self.buildbot_config = parse_config_file(json_path)
    self.info(json.dumps(self.buildbot_config, indent=4))
def read_buildbot_config(self):
    """Parse the buildbot properties JSON (if configured) and log it."""
    json_path = self.config.get("buildbot_json_path")
    if not json_path:
        # If we need to fail out, add postflight_read_buildbot_config()
        self.info("buildbot_json_path is not set. Skipping...")
        return
    # TODO try/except?
    self.buildbot_config = parse_config_file(json_path)
    self.info("Using buildbot properties:")
    self.info(json.dumps(self.buildbot_config, indent=4))
def _read_tree_config(self):
    """Merge the optional in-tree mozharness_config.py into self.tree_config
    and lock it."""
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get('abs_test_install_dir',
                                os.path.join(dirs['abs_work_dir'], 'tests'))
    tree_config_path = os.path.join(test_install_dir, 'config',
                                    'mozharness_config.py')
    if os.path.isfile(tree_config_path):
        # Only merge + dump when the in-tree config is actually present.
        self.tree_config.update(parse_config_file(tree_config_path))
        self.dump_config(
            file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
            config=self.tree_config)
    self.tree_config.lock()
def query_talos_json_config(self):
    """Download (once) and return the talos json config, caching the result."""
    if not self.talos_json_config:
        dirs = self.query_abs_dirs()
        self.talos_json = self.download_file(self.talos_json_url,
                                             parent_dir=dirs['abs_talosdata_dir'],
                                             error_level=FATAL)
        self.talos_json_config = parse_config_file(self.talos_json)
        self.info(pprint.pformat(self.talos_json_config))
    return self.talos_json_config
def query_talos_json_config(self):
    """Return the talos json config, downloading and parsing it on first use."""
    if self.talos_json_config:
        # Cached from an earlier call.
        return self.talos_json_config
    talosdata_dir = self.query_abs_dirs()['abs_talosdata_dir']
    self.talos_json = self.download_file(
        self.talos_json_url, parent_dir=talosdata_dir, error_level=FATAL)
    self.talos_json_config = parse_config_file(self.talos_json)
    self.info(pprint.pformat(self.talos_json_config))
    return self.talos_json_config
def query_talos_json_config(self):
    """Return the talos json config, downloading it first when not cached."""
    if self.talos_json_config:
        return self.talos_json_config
    if not self.talos_json:
        # We need a URL before we can fetch anything.
        talos_json_url = self.query_talos_json_url()
        if not talos_json_url:
            self.fatal("Can't download talos_json without a talos_json_url!")
        self.download_talos_json()
    self.talos_json_config = parse_config_file(self.talos_json)
    self.info(pprint.pformat(self.talos_json_config))
    return self.talos_json_config
def _read_tree_config(self):
    """Read the in-tree config file (if configured) and lock self.tree_config.

    Merges self.config['in_tree_config'] (relative to the test install dir)
    into self.tree_config, dumps the merged config to the log dir, and on
    try branches loads the known try arguments for the harness.
    """
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get(
        'abs_test_install_dir', os.path.join(dirs['abs_work_dir'], 'tests'))
    if 'in_tree_config' in self.config:
        rel_tree_config_path = self.config['in_tree_config']
        tree_config_path = os.path.join(test_install_dir, rel_tree_config_path)
        if not os.path.isfile(tree_config_path):
            # Fix: leading space added before "It must" so the two adjacent
            # string literals don't run together in the fatal message.
            self.fatal(
                "The in-tree configuration file '%s' does not exist!"
                " It must be added to '%s'. See bug 1035551 for more details." %
                (tree_config_path,
                 os.path.join('gecko', 'testing', rel_tree_config_path)))
        try:
            self.tree_config.update(parse_config_file(tree_config_path))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # aren't swallowed; parse errors are still reported as FATAL.
            msg = "There was a problem parsing the in-tree configuration file '%s'!" % \
                  os.path.join('gecko', 'testing', rel_tree_config_path)
            self.exception(message=msg, level=FATAL)
        self.dump_config(file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
                        config=self.tree_config)
    if (self.buildbot_config and 'properties' in self.buildbot_config and
            self.buildbot_config['properties'].get('branch') == 'try'):
        # On try, expose the known try arguments to the harness.
        try_config_path = os.path.join(test_install_dir, 'config',
                                       'mozharness', 'try_arguments.py')
        known_try_arguments = parse_config_file(try_config_path)
        self.set_extra_try_arguments(known_try_arguments)
    self.tree_config.lock()
def _read_tree_config(self):
    """Read the in-tree config file (if configured) and lock self.tree_config.

    Merges self.config['in_tree_config'] (relative to the test install dir)
    into self.tree_config, dumps the merged config to the log dir, and on
    try branches parses try syntax from the push comments.
    """
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get('abs_test_install_dir',
                                os.path.join(dirs['abs_work_dir'], 'tests'))
    if 'in_tree_config' in self.config:
        rel_tree_config_path = self.config['in_tree_config']
        tree_config_path = os.path.join(test_install_dir, rel_tree_config_path)
        if not os.path.isfile(tree_config_path):
            # Fix: leading space added before "It must" so the two adjacent
            # string literals don't run together in the fatal message.
            self.fatal("The in-tree configuration file '%s' does not exist!"
                       " It must be added to '%s'. See bug 1035551 for more details." %
                       (tree_config_path,
                        os.path.join('gecko', 'testing', rel_tree_config_path)))
        try:
            self.tree_config.update(parse_config_file(tree_config_path))
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # aren't swallowed; parse errors are still reported as FATAL.
            msg = "There was a problem parsing the in-tree configuration file '%s'!" % \
                  os.path.join('gecko', 'testing', rel_tree_config_path)
            self.exception(message=msg, level=FATAL)
        self.dump_config(file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
                        config=self.tree_config)
    if (self.buildbot_config and 'properties' in self.buildbot_config and
            self.buildbot_config['properties'].get('branch') == 'try'):
        try_config_path = os.path.join(test_install_dir, 'config',
                                       'mozharness', 'try_arguments.py')
        known_try_arguments = parse_config_file(try_config_path)
        # Try syntax normally rides in the last change's commit comments.
        comments = self.buildbot_config['sourcestamp']['changes'][-1]['comments']
        if not comments and 'try_syntax' in self.buildbot_config['properties']:
            # If we don't find try syntax in the usual place, check for it in an
            # alternate property available to tools using self-serve.
            comments = self.buildbot_config['properties']['try_syntax']
        self.parse_extra_try_arguments(comments, known_try_arguments)
    self.tree_config.lock()
def query_talos_json_config(self):
    """Return the talos json config; fetch and parse it when not cached."""
    if not self.talos_json_config:
        if not self.talos_json:
            # We need a URL before we can fetch anything.
            talos_json_url = self.query_talos_json_url()
            if not talos_json_url:
                self.fatal(
                    "Can't download talos_json without a talos_json_url!")
            self.download_talos_json()
        self.talos_json_config = parse_config_file(self.talos_json)
        self.info(pprint.pformat(self.talos_json_config))
    return self.talos_json_config
def checkout_gaia_l10n(self):
    """Pull gaia locale repos for every locale in the gaia languages file."""
    gaia_languages_file = self.config.get("gaia_languages_file")
    if not gaia_languages_file:
        # Nothing to do without a languages file.
        self.info("Skipping checkout_gaia_l10n because no gaia language file was specified.")
        return
    l10n_config = self.load_gecko_config().get("gaia", {}).get("l10n")
    if not l10n_config:
        self.fatal("gaia.l10n is required in the gecko config when --gaia-languages-file is specified.")
    abs_work_dir = self.query_abs_dirs()["abs_work_dir"]
    languages_file = os.path.join(abs_work_dir, "gaia", gaia_languages_file)
    l10n_base_dir = self.query_abs_dirs()["gaia_l10n_base_dir"]
    self.pull_gaia_locale_source(l10n_config,
                                 parse_config_file(languages_file).keys(),
                                 l10n_base_dir)
def checkout_gaia_l10n(self):
    """Pull gaia locale repos for every locale in the gaia languages file."""
    gaia_languages_file = self.config.get('gaia_languages_file')
    if not gaia_languages_file:
        # Nothing to do without a languages file.
        self.info('Skipping checkout_gaia_l10n because no gaia language file was specified.')
        return
    l10n_config = self.load_goanna_config().get('gaia', {}).get('l10n')
    if not l10n_config:
        self.fatal("gaia.l10n is required in the goanna config when --gaia-languages-file is specified.")
    abs_work_dir = self.query_abs_dirs()['abs_work_dir']
    languages_file = os.path.join(abs_work_dir, 'gaia', gaia_languages_file)
    l10n_base_dir = self.query_abs_dirs()['gaia_l10n_base_dir']
    self.pull_gaia_locale_source(l10n_config,
                                 parse_config_file(languages_file).keys(),
                                 l10n_base_dir)
def checkout_gaia_l10n(self):
    """Pull gaia locale repos for every locale in the gaia languages file."""
    gaia_languages_file = self.config.get('gaia_languages_file')
    if not gaia_languages_file:
        # Nothing to do without a languages file.
        self.info('Skipping checkout_gaia_l10n because no gaia language file was specified.')
        return
    l10n_config = self.load_gecko_config().get('gaia', {}).get('l10n')
    if not l10n_config:
        self.fatal("gaia.l10n is required in the gecko config when --gaia-languages-file is specified.")
    abs_work_dir = self.query_abs_dirs()['abs_work_dir']
    languages_file = os.path.join(abs_work_dir, 'gaia', gaia_languages_file)
    l10n_base_dir = self.query_abs_dirs()['gaia_l10n_base_dir']
    self.pull_gaia_locale_source(l10n_config,
                                 parse_config_file(languages_file).keys(),
                                 l10n_base_dir)
def test_dump_config_hierarchy_valid_files_len(self):
    """--dump-config-hierarchy should record one entry per config source.

    Constructing BaseScript with 'dump_config_hierarchy' set ends in
    SystemExit, so the except branch below is the actual assertion path.
    """
    try:
        self.s = script.BaseScript(
            initial_config_file='test/test.json',
            option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
            config={'dump_config_hierarchy': True}
        )
    except SystemExit:
        local_cfg_files = parse_config_file('test_logs/localconfigfiles.json')
        # first let's see if the correct number of config files were
        # realized
        self.assertEqual(
            len(local_cfg_files), 4,
            msg="--dump-config-hierarchy dumped wrong number of config files"
        )
def _read_tree_config(self):
    """Merge the optional in-tree mozharness_config.py into self.tree_config
    and lock it."""
    dirs = self.query_abs_dirs()
    # Default to <abs_work_dir>/tests when abs_test_install_dir isn't set.
    test_install_dir = dirs.get('abs_test_install_dir',
                                os.path.join(dirs['abs_work_dir'], 'tests'))
    tree_config_path = os.path.join(test_install_dir, 'config',
                                    'mozharness_config.py')
    if os.path.isfile(tree_config_path):
        # Only merge + dump when the in-tree config is actually present.
        self.tree_config.update(parse_config_file(tree_config_path))
        self.dump_config(
            file_path=os.path.join(dirs['abs_log_dir'], 'treeconfig.json'),
            config=self.tree_config)
    self.tree_config.lock()
def read_buildbot_config(self):
    """Parse the buildbot properties JSON and log it (minus commit titles)."""
    json_path = self.config.get("buildbot_json_path")
    if not json_path:
        # If we need to fail out, add postflight_read_buildbot_config()
        self.info("buildbot_json_path is not set. Skipping...")
        return
    # TODO try/except?
    self.buildbot_config = parse_config_file(json_path)
    buildbot_properties = copy.deepcopy(self.buildbot_config.get('properties', {}))
    # Remove the commit messages since they can cause false positives with
    # Treeherder log parsers. Eg: "Bug X - Fix TEST-UNEPXECTED-FAIL ...".
    buildbot_properties.pop('commit_titles', None)
    self.info("Using buildbot properties:")
    self.info(json.dumps(buildbot_properties, indent=4))
def parse_locales_file(self, locales_file):
    """Return the list of locales named in locales_file.

    JSON files are filtered by self.config['locales_platform'] and have
    their per-locale revisions recorded in self.locale_dict; other files
    are read as whitespace-separated locale codes.
    """
    config = self.config
    platform = config.get("locales_platform", None)
    if locales_file.endswith('json'):
        locales_json = parse_config_file(locales_file)
        self.locale_dict = {}
        locales = []
        for locale in locales_json.keys():
            # Skip locales not built for this platform.
            if platform and platform not in locales_json[locale]['platforms']:
                continue
            locales.append(locale)
            self.locale_dict[locale] = locales_json[locale]['revision']
        return locales
    # Plain text: whitespace-separated locale codes.
    return self.read_from_file(locales_file).split()
def query_release_config(self):
    """Build, cache and return the release config.

    Values come either from the file named by
    self.config['release_config_file'] or, failing that, straight from
    self.config.  Missing/invalid release config files are fatal.
    """
    if self.release_config:
        # Already populated on a previous call.
        return self.release_config
    c = self.config
    dirs = self.query_abs_dirs()
    if c.get("release_config_file"):
        self.info("Getting release config from %s..." % c["release_config_file"])
        rc = None
        try:
            rc = parse_config_file(os.path.join(dirs['abs_work_dir'],
                                                c["release_config_file"]),
                                   config_dict_name="releaseConfig")
        except IOError:
            self.fatal("Release config file %s not found!" % c["release_config_file"])
        except RuntimeError:
            self.fatal("Invalid release config file %s!" % c["release_config_file"])
        # ftp/aus credentials may be overridden from self.config; everything
        # else comes from the releaseConfig dict.
        self.release_config['version'] = rc['version']
        self.release_config['buildnum'] = rc['buildNumber']
        self.release_config['ftp_server'] = rc['stagingServer']
        self.release_config['ftp_user'] = c.get('ftp_user', rc['hgUsername'])
        self.release_config['ftp_ssh_key'] = c.get('ftp_ssh_key', rc['hgSshKey'])
        self.release_config['aus_server'] = rc['ausHost']
        self.release_config['aus_user'] = rc['ausUser']
        self.release_config['aus_ssh_key'] = c.get(
            'aus_ssh_key', '~/.ssh/%s' % rc['ausSshKey'])
    else:
        self.info("No release config file; using default config.")
        for key in ('version', 'buildnum',
                    'ftp_server', 'ftp_user', 'ftp_ssh_key',
                    'aus_server', 'aus_user', 'aus_ssh_key',):
            self.release_config[key] = c[key]
    self.info("Release config:\n%s" % self.release_config)
    return self.release_config
def test_dump_config_hierarchy_keys_unique_and_valid(self):
    """Each config file's dump should only carry the keys unique to it.

    Constructing BaseScript with 'dump_config_hierarchy' set ends in
    SystemExit, so the except branch below is the actual assertion path.
    """
    try:
        self.s = script.BaseScript(
            initial_config_file='test/test.json',
            option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
            config={'dump_config_hierarchy': True}
        )
    except SystemExit:
        local_cfg_files = parse_config_file('test_logs/localconfigfiles.json')
        # now let's see if only unique items were added from each config
        t_override = local_cfg_files.get('test/test_override.py', {})
        self.assertTrue(
            t_override.get('keep_string') == "don't change me" and
            len(t_override.keys()) == 1,
            msg="--dump-config-hierarchy dumped wrong keys/value for "
                "`test/test_override.py`. There should only be one "
                "item and it should be unique to all the other "
                "items in test_log/localconfigfiles.json."
        )
def _update_build_variant(self, rw_config, variant='artifact'):
    """ Intended for use in _pre_config_lock

    Locate the config file for `variant`, merge it into self.config and
    into rw_config's bookkeeping (so --dump-config stays accurate), force
    an artifact build, and rebuild the action lists from the merged config.

    :param rw_config: the still-writable BaseConfig object being locked.
    :param variant: build variant name used to look up its config file.
    """
    c = self.config
    variant_cfg_path, _ = BuildOptionParser.find_variant_cfg_path(
        '--custom-build-variant-cfg', variant, rw_config.config_parser)
    if not variant_cfg_path:
        self.fatal(
            'Could not find appropriate config file for variant %s' %
            variant)
    # Update other parts of config to keep dump-config accurate
    # Only dump-config is affected because most config info is set during
    # initial parsing
    variant_cfg_dict = parse_config_file(variant_cfg_path)
    rw_config.all_cfg_files_and_dicts.append(
        (variant_cfg_path, variant_cfg_dict))
    c.update({
        'build_variant': variant,
        'config_files': c['config_files'] + [variant_cfg_path]
    })
    self.info("Updating self.config with the following from {}:".format(
        variant_cfg_path))
    self.info(pprint.pformat(variant_cfg_dict))
    c.update(variant_cfg_dict)
    c['forced_artifact_build'] = True
    # Bug 1231320 adds MOZHARNESS_ACTIONS in TaskCluster tasks to override default_actions
    # We don't want that when forcing an artifact build.
    if rw_config.volatile_config['actions']:
        self.info("Updating volatile_config to include default_actions "
                  "from {}.".format(variant_cfg_path))
        # add default actions in correct order
        combined_actions = []
        for a in rw_config.all_actions:
            if a in c['default_actions'] or a in rw_config.volatile_config[
                    'actions']:
                combined_actions.append(a)
        rw_config.volatile_config['actions'] = combined_actions
        self.info("Actions in volatile_config are now: {}".format(
            rw_config.volatile_config['actions']))
    # replace rw_config as well to set actions as in BaseScript
    rw_config.set_config(c, overwrite=True)
    rw_config.update_actions()
    self.actions = tuple(rw_config.actions)
    self.all_actions = tuple(rw_config.all_actions)
def query_talos_json_config(self):
    """Return the talos json config, downloading it on first use.

    Returns None (after logging fatally where appropriate) when
    use_talos_json or suite is not configured.
    """
    if self.talos_json_config:
        # Cached from an earlier call.
        return self.talos_json_config
    config = self.config
    if not config['use_talos_json']:
        return
    if not config['suite']:
        self.fatal("To use talos_json, you must define use_talos_json, suite.")
        return
    if not self.talos_json:
        # We need a URL before we can fetch anything.
        talos_json_url = self.query_talos_json_url()
        if not talos_json_url:
            self.fatal("Can't download talos_json without a talos_json_url!")
        self.download_talos_json()
    self.talos_json_config = parse_config_file(self.talos_json)
    self.info(pprint.pformat(self.talos_json_config))
    return self.talos_json_config
def parse_locales_file(self, locales_file):
    """Return the locales named in locales_file, recording each locale's
    revision in self.l10n_revisions (JSON input only)."""
    config = self.config
    platform = config.get("locales_platform", None)
    if not locales_file.endswith('json'):
        # Plain text: whitespace-separated locale codes.
        return self.read_from_file(locales_file).split()
    locales = []
    locales_json = parse_config_file(locales_file)
    for locale in locales_json.keys():
        details = locales_json[locale]
        if isinstance(details, dict):
            if platform and platform not in details['platforms']:
                # Not built for this platform; skip entirely.
                continue
            self.l10n_revisions[locale] = details['revision']
        else:
            # some other way of getting this?
            self.l10n_revisions[locale] = 'default'
        locales.append(locale)
    return locales
def query_release_config(self):
    """Build, cache and return the release config.

    Values come either from the file named by
    self.config['release_config_file'] or, failing that, straight from
    self.config.  Missing/invalid release config files are fatal.
    """
    if self.release_config:
        # Already populated on a previous call.
        return self.release_config
    c = self.config
    dirs = self.query_abs_dirs()
    if c.get("release_config_file"):
        self.info("Getting release config from %s..." % c["release_config_file"])
        rc = None
        try:
            rc = parse_config_file(
                os.path.join(dirs["abs_work_dir"], c["release_config_file"]),
                config_dict_name="releaseConfig"
            )
        except IOError:
            self.fatal("Release config file %s not found!" % c["release_config_file"])
        except RuntimeError:
            self.fatal("Invalid release config file %s!" % c["release_config_file"])
        # ftp/aus credentials may be overridden from self.config; everything
        # else comes from the releaseConfig dict.
        self.release_config["version"] = rc["version"]
        self.release_config["buildnum"] = rc["buildNumber"]
        self.release_config["old_version"] = rc["oldVersion"]
        self.release_config["old_buildnum"] = rc["oldBuildNumber"]
        self.release_config["ftp_server"] = rc["stagingServer"]
        self.release_config["ftp_user"] = c.get("ftp_user", rc["hgUsername"])
        self.release_config["ftp_ssh_key"] = c.get("ftp_ssh_key", rc["hgSshKey"])
        self.release_config["aus_server"] = rc["ausHost"]
        self.release_config["aus_user"] = rc["ausUser"]
        self.release_config["aus_ssh_key"] = c.get("aus_ssh_key",
                                                   "~/.ssh/%s" % rc["ausSshKey"])
    else:
        self.info("No release config file; using default config.")
        for key in (
            "version", "buildnum",
            "old_version", "old_buildnum",
            "ftp_server", "ftp_user", "ftp_ssh_key",
            "aus_server", "aus_user", "aus_ssh_key",
        ):
            self.release_config[key] = c[key]
    self.info("Release config:\n%s" % self.release_config)
    return self.release_config
def _update_build_variant(self, rw_config, variant='artifact'):
    """ Intended for use in _pre_config_lock

    Locate the config file for `variant`, merge it into self.config and
    into rw_config's bookkeeping (so --dump-config stays accurate), force
    an artifact build, and rebuild the action lists from the merged config.

    :param rw_config: the still-writable BaseConfig object being locked.
    :param variant: build variant name used to look up its config file.
    """
    c = self.config
    variant_cfg_path, _ = BuildOptionParser.find_variant_cfg_path(
        '--custom-build-variant-cfg', variant, rw_config.config_parser
    )
    if not variant_cfg_path:
        self.fatal('Could not find appropriate config file for variant %s' % variant)
    # Update other parts of config to keep dump-config accurate
    # Only dump-config is affected because most config info is set during
    # initial parsing
    variant_cfg_dict = parse_config_file(variant_cfg_path)
    rw_config.all_cfg_files_and_dicts.append((variant_cfg_path, variant_cfg_dict))
    c.update({
        'build_variant': variant,
        'config_files': c['config_files'] + [variant_cfg_path]
    })
    self.info("Updating self.config with the following from {}:".format(variant_cfg_path))
    self.info(pprint.pformat(variant_cfg_dict))
    c.update(variant_cfg_dict)
    c['forced_artifact_build'] = True
    # Bug 1231320 adds MOZHARNESS_ACTIONS in TaskCluster tasks to override default_actions
    # We don't want that when forcing an artifact build.
    if rw_config.volatile_config['actions']:
        self.info("Updating volatile_config to include default_actions "
                  "from {}.".format(variant_cfg_path))
        # add default actions in correct order
        combined_actions = []
        for a in rw_config.all_actions:
            if a in c['default_actions'] or a in rw_config.volatile_config['actions']:
                combined_actions.append(a)
        rw_config.volatile_config['actions'] = combined_actions
        self.info("Actions in volatile_config are now: {}".format(
            rw_config.volatile_config['actions'])
        )
    # replace rw_config as well to set actions as in BaseScript
    rw_config.set_config(c, overwrite=True)
    rw_config.update_actions()
    self.actions = tuple(rw_config.actions)
    self.all_actions = tuple(rw_config.all_actions)
def pull(self):
    """ Clone gaia and gecko locale repos

    Pulls the gaia locale repos listed in the gaia languages file, then
    the gecko locale repos, and copies each locale's mobile overrides
    into b2g/chrome/overrides (which isn't present in gecko).
    """
    languages_file = self.config['gaia_languages_file']
    l10n_base_dir = self.query_abs_dirs()['gaia_l10n_base_dir']
    l10n_config = {
        'root': self.config['gaia_l10n_root'],
        'vcs': self.config['gaia_l10n_vcs'],
    }
    self.pull_gaia_locale_source(l10n_config,
                                 parse_config_file(languages_file).keys(),
                                 l10n_base_dir)
    self.pull_locale_source()
    gecko_locales = self.query_locales()
    # populate b2g/overrides, which isn't in gecko atm
    dirs = self.query_abs_dirs()
    for locale in gecko_locales:
        self.mkdir_p(os.path.join(dirs['abs_l10n_dir'], locale,
                                  'b2g', 'chrome', 'overrides'))
        self.copytree(os.path.join(dirs['abs_l10n_dir'], locale,
                                   'mobile', 'overrides'),
                      os.path.join(dirs['abs_l10n_dir'], locale,
                                   'b2g', 'chrome', 'overrides'),
                      error_level=FATAL)
def test_dump_config_hierarchy_matches_self_config(self):
    """The per-file configs dumped by --dump-config-hierarchy, combined,
    should equal the fully-merged self.config.

    Constructing BaseScript with 'dump_config_hierarchy' set ends in
    SystemExit, so the except branch below is the actual assertion path.
    """
    try:
        ######
        # we need temp_cfg because self.s will be gcollected (NoneType) by
        # the time we get to SystemExit exception
        # temp_cfg will differ from self.s.config because of
        # 'dump_config_hierarchy'. we have to make a deepcopy because
        # config is a locked dict
        temp_s = script.BaseScript(
            initial_config_file="test/test.json",
            option_args=[
                "--cfg", "test/test_override.py,test/test_override2.py"
            ],
        )
        from copy import deepcopy
        temp_cfg = deepcopy(temp_s.config)
        temp_cfg.update({"dump_config_hierarchy": True})
        ######
        self.s = script.BaseScript(
            initial_config_file="test/test.json",
            option_args=[
                "--cfg", "test/test_override.py,test/test_override2.py"
            ],
            config={"dump_config_hierarchy": True},
        )
    except SystemExit:
        local_cfg_files = parse_config_file(
            "test_logs/localconfigfiles.json")
        # finally let's just make sure that all the items added up, equals
        # what we started with: self.config
        target_cfg = {}
        for cfg_file in local_cfg_files:
            target_cfg.update(local_cfg_files[cfg_file])
        self.assertEqual(
            target_cfg, temp_cfg,
            msg="all of the items (combined) in each cfg file dumped via "
                "--dump-config-hierarchy does not equal self.config ",
        )
def _checkout_gaia_l10n(self):
    """Check out the gaia l10n repos and point the gaia build env at them.

    Reads the desktop config.json from the b2g source tree, pulls each
    locale listed in gaia's languages_all.json into build-gaia-l10n, and
    sets LOCALE_BASEDIR/LOCALES_FILE in the environment returned by
    query_env().  Re-raises after logging on any failure.
    """
    # Checkout gaia l10n
    try:
        dirs = self.query_abs_dirs()
        config_json_path = os.path.join(dirs["abs_src_dir"], "b2g",
                                        "config", "desktop", "config.json")
        self.info("loading %s" % config_json_path)
        # Fix: open via a context manager so the file handle is always
        # closed (was json.load(open(...)), which leaked on parse errors).
        with open(config_json_path) as config_json_file:
            config_json = json.load(config_json_file)
        self.debug("got %s" % config_json)
        l10n_config = config_json["gaia"]["l10n"]
        languages_file = os.path.join(dirs["abs_src_dir"],
                                      "gaia/locales/languages_all.json")
        l10n_base_dir = os.path.join(dirs["abs_work_dir"], "build-gaia-l10n")
        # Setup the environment for the gaia build system to find the locales
        env = self.query_env()
        env["LOCALE_BASEDIR"] = l10n_base_dir
        env["LOCALES_FILE"] = languages_file
        self.pull_gaia_locale_source(l10n_config,
                                     parse_config_file(languages_file).keys(),
                                     l10n_base_dir)
    except Exception:
        self.exception("failed to clone gaia l10n repos")
        raise
def _pre_config_lock(self, rw_config): # This is a workaround for legacy compatibility with the original # hgtool.py. # # Since we need to read the buildbot json props, as well as parse # additional commandline arguments that aren't specified via # options, we call this function before locking the config. # # rw_config is the BaseConfig object that parsed the options; # self.config is the soon-to-be-locked runtime configuration. # # This is a powerful way to hack the config before locking; # we need to be careful not to abuse it. args = rw_config.args c = self.config if c.get('vcs') is None: self.fatal("Must specify --vcs!\n\n%s" % rw_config.config_parser.format_help()) if c.get('vcs_repo') is None: if len(args) not in (1, 2): self.fatal("""Invalid number of arguments! You need to either specify --repo or specify it after the options: %s""" % rw_config.config_parser.get_usage()) self.config['vcs_repo'] = args[0] if len(args) == 2: self.config['vcs_dest'] = args[1] elif not self.config.get('vcs_dest'): self.config['vcs_dest'] = os.path.basename(self.config['vcs_repo']) # This is a buildbot-specific props file. if self.config.get('vcs_propsfile'): js = parse_config_file(self.config['vcs_propsfile']) if self.config.get('vcs_revision') is None: self.config['vcs_revision'] = js['sourcestamp']['revision'] if self.config.get('vcs_branch') is None: self.config['vcs_branch'] = js['sourcestamp']['branch']
def _pre_config_lock(self, rw_config): # This is a workaround for legacy compatibility with the original # hgtool.py. # # Since we need to read the buildbot json props, as well as parse # additional commandline arguments that aren't specified via # options, we call this function before locking the config. # # rw_config is the BaseConfig object that parsed the options; # self.config is the soon-to-be-locked runtime configuration. # # This is a powerful way to hack the config before locking; # we need to be careful not to abuse it. args = rw_config.args c = self.config if c.get('vcs') is None: self.fatal("Must specify --vcs!\n\n%s" % \ rw_config.config_parser.format_help()) if c.get('vcs_repo') is None: if len(args) not in (1, 2): self.fatal("""Invalid number of arguments! You need to either specify --repo or specify it after the options: %s""" % rw_config.config_parser.get_usage()) self.config['vcs_repo'] = args[0] if len(args) == 2: self.config['vcs_dest'] = args[1] elif not self.config.get('vcs_dest'): self.config['vcs_dest'] = os.path.basename(self.config['vcs_repo']) # This is a buildbot-specific props file. if self.config.get('vcs_propsfile'): js = parse_config_file(self.config['vcs_propsfile']) if self.config.get('vcs_revision') is None: self.config['vcs_revision'] = js['sourcestamp']['revision'] if self.config.get('vcs_branch') is None: self.config['vcs_branch'] = js['sourcestamp']['branch']
# ***** END LICENSE BLOCK ***** ''' Compile jinja2 templates with variables passed in from a mozharness config. ''' import jinja2 from mozharness.base.config import parse_config_file def compile_template(template_vars, template_file): templateLoader = jinja2.FileSystemLoader(searchpath="/") templateEnv = jinja2.Environment(loader=templateLoader) template = templateEnv.get_template(template_file) return template.render(template_vars) if __name__ == '__main__': import os import sys if len(sys.argv) < 3: print('Usage: compile_mozharness_config.py <jinja2 template>' ' <mozharness config ... >') sys.exit() template_file = os.path.abspath(sys.argv[1]) config_vars = {} for moz_config in sys.argv[2:]: config_vars.update(parse_config_file(moz_config)) print(compile_template(config_vars, template_file))