def _extractConfig(self, configurationsDictionary):
    """Extract and normalize this handler's configuration values.

    Delegates the common extraction to the base class, then coerces the
    two optional boolean flags 'uniqueresourceid' and 'onduplicateupdate'
    with common.str2bool; either flag defaults to False when absent.
    """
    BasePersistenceHandler._extractConfig(self, configurationsDictionary)
    for flag in ("uniqueresourceid", "onduplicateupdate"):
        if flag in self.config:
            self.config[flag] = common.str2bool(self.config[flag])
        else:
            self.config[flag] = False
def process_config_file(self, path_config_file):
    """Read a colon-separated config file and populate self.options/self.configs.

    Each non-comment line is split on ':'; keys starting with "config" are
    wrapped in self.Config objects, "class-map" values are split on ',',
    everything else is stored verbatim. Afterwards, selected options are
    coerced to their runtime types (int/float/bool/callable).

    :param path_config_file: path of the configuration file to parse
    """
    self.options = {}
    self.configs = {}
    with open(path_config_file) as fp:
        for line in fp:
            # NOTE(review): only option[1] is kept, so a value containing
            # ':' is silently truncated at the second colon — confirm
            # values never contain colons.
            option = line.strip().split(":")
            # Skip malformed lines and '#' comments.
            # NOTE(review): a line starting with ':' makes option[0] empty
            # and option[0][0] raise IndexError — presumably never occurs.
            if len(option) < 2 or option[0][0] == '#':
                continue
            if option[0][:6] == "config":
                # Keys prefixed "config" become Config objects; counter
                # tracks how many were seen.
                self.cont_configs += 1
                self.configs[option[0]] = self.Config(option[1])
            elif option[0] == "class-map":
                # class-map is a comma-separated list.
                self.options[option[0]] = option[1].split(",")
            else:
                self.options[option[0]] = option[1]
    # numpy random generator: fixed seed when running a single
    # replication (reproducibility), OS entropy otherwise.
    if int(self.options['replications']) == 1:
        self.options['numpy_rng'] = np.random.RandomState(89677)
    else:
        self.options['numpy_rng'] = np.random.RandomState(None)
    self.options['execution-name'] = self.options['dataset-name-file'] + ";" + \
        self.options['experiment-name']
    # Coerce numeric options from their string form.
    self.options['input-size'] = int(self.options['input-size'])
    self.options['batch-size'] = int(self.options['batch-size'])
    self.options['replications'] = int(self.options['replications'])
    self.options['pretrain-epochs'] = int(
        self.options['pretrain-epochs'])
    self.options['train-epochs'] = int(self.options['train-epochs'])
    self.options['learning-finetune'] = float(
        self.options['learning-finetune'])
    self.options['learning-pretrain'] = float(
        self.options['learning-pretrain'])
    # Boolean switches controlling which learned representations are saved.
    self.options['save_pretrain_new_representation'] = \
        str2bool(self.options['save_pretrain_new_representation'])
    self.options['save_train_new_representation'] = \
        str2bool(self.options['save_train_new_representation'])
    self.options['save_valid_new_representation'] = \
        str2bool(self.options['save_valid_new_representation'])
    self.options['save_test_new_representation'] = \
        str2bool(self.options['save_test_new_representation'])
    # Resolve activation names to callables once, up front.
    self.options['activation_obj'] = get_activation_function(
        self.options['activation'])
    self.options[
        'autoencoder-in-activation_obj'] = get_activation_function(
        self.options['autoencoder-in-activation'])
    self.options[
        'autoencoder-reconstruction-activation_obj'] = get_activation_function(
        self.options['autoencoder-reconstruction-activation'])
def _extractConfig(self, configurationsDictionary):
    """Pull handler settings out of the configurations dictionary.

    The optional flags 'uniqueresourceid' and 'onduplicateupdate' are
    converted with common.str2bool when present and default to False
    otherwise.
    """
    BasePersistenceHandler._extractConfig(self, configurationsDictionary)
    if "uniqueresourceid" in self.config:
        self.config["uniqueresourceid"] = common.str2bool(
            self.config["uniqueresourceid"])
    else:
        self.config["uniqueresourceid"] = False
    if "onduplicateupdate" in self.config:
        self.config["onduplicateupdate"] = common.str2bool(
            self.config["onduplicateupdate"])
    else:
        self.config["onduplicateupdate"] = False
def __init__(self, config_file, overrides=None, filters="all", checks="all"):
    """Initialize the legacy promoter configuration.

    Extends the parent init with override handling, config expansion and
    sanity checking, then exposes every resulting config key as an
    instance attribute.

    :param config_file: the path to the configuration file to load
    :param overrides: an object with overrides for the configuration
    :param filters: "all" or an iterable subset of
        {'overrides', 'expand', 'experimental'}
    :param checks: forwarded to sanity_check
    :raises ConfigError: when the sanity check fails
    """
    super(PromoterLegacyConfig, self).__init__(config_file)
    if filters == "all":
        filters = ['overrides', 'expand', 'experimental']
    config = {}
    if 'overrides' in filters:
        config = self.handle_overrides(self._config, overrides=overrides)
    if 'expand' in filters:
        config = self.expand_config(config)
    if not self.sanity_check(config, self._file_config, checks=checks):
        self.log.error("Error in configuration file {}".format(config_file))
        raise ConfigError
    # Add experimental configuration if activated
    if 'experimental' in filters:
        if str2bool(config.get('experimental', 'false')):
            config = self.experimental_config(config)
    # reLoad keys as config attributes
    for key in config:
        setattr(self, key, config[key])
def parse_args(args):
    """Parse script arguments.

    :param args: list of raw argument strings
    :return: parsed namespace; ``is_official_release`` is coerced from
        its string form into a bool via common.str2bool
    """
    cli = argparse.ArgumentParser()
    cli.add_argument('--build-directory',
                     required=True,
                     action='store',
                     help="Top level directory that stores all the cloned repositories.")
    cli.add_argument('--manifest-file',
                     required=True,
                     action='store',
                     help="The file path of manifest")
    cli.add_argument('--parameter-file',
                     action='store',
                     default="downstream_parameters",
                     help="The jenkins parameter file that will used for succeeding Jenkins job")
    cli.add_argument('--git-credential',
                     action='append',
                     default=None,
                     help="Git URL and credential for CI services: <URL>,<Credentials>")
    cli.add_argument('--sudo-credential',
                     action='store',
                     default=None,
                     help="username:password pair for sudo user")
    cli.add_argument('--jobs',
                     type=int,
                     default=-1,
                     action="store",
                     help="Number of build jobs to run in parallel")
    cli.add_argument('--is-official-release',
                     default="false",
                     action="store",
                     help="Whether this release is official")
    cli.add_argument('--force',
                     action="store_true",
                     help="Overwrite a directory even if it exists")
    namespace = cli.parse_args(args)
    # Normalize the string flag into a real boolean for downstream use.
    namespace.is_official_release = common.str2bool(
        namespace.is_official_release)
    return namespace
def _extractConfig(self, configurationsDictionary):
    """Validate and normalize this handler's configuration.

    'selectcachesize' is mandatory and coerced to int; the optional
    'onduplicateupdate' flag defaults to False and is otherwise parsed
    with common.str2bool.

    :raises KeyError: if 'selectcachesize' is missing
    """
    BasePersistenceHandler._extractConfig(self, configurationsDictionary)
    if "selectcachesize" not in self.config:
        raise KeyError("Parameter 'selectcachesize' must be specified.")
    self.config["selectcachesize"] = int(self.config["selectcachesize"])
    self.config["onduplicateupdate"] = (
        common.str2bool(self.config["onduplicateupdate"])
        if "onduplicateupdate" in self.config
        else False)
def __init__(self, rule_cfg: dict, global_cfg: dict):
    """Build an upload rule from its config section.

    :param rule_cfg: per-rule settings (patterns, upload repo,
        dependency repositories, version adjustment, architectures)
    :param global_cfg: global settings; used to derive the default
        architecture list from the upload repository
    """
    upload = rule_cfg['upload']
    self.upload_repo = upload
    compiled = {}
    for name, pattern in rule_cfg.get('patterns', {}).items():
        compiled[name] = re.compile(pattern)
    self.regex_map = compiled
    self.deps_repos = rule_cfg.get('dependency-repositories', [])
    self.adjust_version = str2bool(rule_cfg.get('adjust-srcpkg-version'))
    # get archs if specified, otherwise take all archs in the uploaded repo
    fallback_archs = list(global_cfg['repositories'][upload].keys())
    self.archs = rule_cfg.get('built-architectures', fallback_archs)
def parse_args(args):
    """Parse script arguments.

    :return: Parsed args for assignment; ``is_official_release`` is
        converted from string to bool via common.str2bool
    """
    # Table of (flags, kwargs) keeps the option definitions compact.
    specs = [
        (['--build-directory'],
         {'required': True, 'action': 'store',
          'help': "Top level directory that stores all the cloned repositories."}),
        (['--manifest-file'],
         {'required': True, 'action': 'store',
          'help': "The file path of manifest"}),
        (['--parameter-file'],
         {'action': 'store', 'default': "downstream_parameters",
          'help': "The jenkins parameter file that will used for succeeding Jenkins job"}),
        (['--git-credential'],
         {'required': True, 'action': 'append', 'default': None,
          'help': "Git URL and credential for CI services: <URL>,<Credentials>"}),
        (['--sudo-credential'],
         {'action': 'store', 'default': None,
          'help': "username:password pair for sudo user"}),
        (['--jobs'],
         {'default': -1, 'type': int, 'action': "store",
          'help': "Number of build jobs to run in parallel"}),
        (['--is-official-release'],
         {'default': "false", 'action': "store",
          'help': "Whether this release is official"}),
        (['--force'],
         {'action': "store_true",
          'help': "Overwrite a directory even if it exists"}),
    ]
    parser = argparse.ArgumentParser()
    for flags, kwargs in specs:
        parser.add_argument(*flags, **kwargs)
    parsed = parser.parse_args(args)
    parsed.is_official_release = common.str2bool(parsed.is_official_release)
    return parsed
def parse_command_line(args):
    """Parse script arguments.

    :return: Parsed args for assignment; ``is_official_release`` is
        coerced to bool via common.str2bool
    """
    cmdline = argparse.ArgumentParser()
    cmdline.add_argument("--repo-dir",
                         action="store",
                         required=True,
                         help="the directory of repository")
    cmdline.add_argument('--is-official-release',
                         action="store",
                         default="false",
                         help="Whether this release is official")
    result = cmdline.parse_args(args)
    result.is_official_release = common.str2bool(result.is_official_release)
    return result
def parse_command_line(args):
    """Parse script arguments.

    :return: Parsed args for assignment; the string value of
        ``--is-official-release`` is normalized to a bool
    """
    p = argparse.ArgumentParser()
    p.add_argument("--repo-dir", required=True, action="store",
                   help="the directory of repository")
    p.add_argument('--is-official-release', default="false", action="store",
                   help="Whether this release is official")
    ns = p.parse_args(args)
    # Flag arrives as a string ("false" by default); convert once here.
    ns.is_official_release = common.str2bool(ns.is_official_release)
    return ns
def parse_command_line(args):
    """Parse script arguments.

    :return: Parsed args for assignment; ``is_official_release`` is
        converted from its string form with common.str2bool
    """
    cmd = argparse.ArgumentParser()
    cmd.add_argument("--manifest", action="store", required=True,
                     help="the path of manifest file")
    cmd.add_argument("--builddir", action="store", required=True,
                     help="destination for checked out repositories")
    cmd.add_argument("--force", action="store_true",
                     help="use destination dir, even if it exists")
    cmd.add_argument("--git-credential", action="append", required=True,
                     help="Git credentials for CI services")
    cmd.add_argument('--jobs', action="store", type=int, default=-1,
                     help="Number of build jobs to run in parallel")
    cmd.add_argument('--is-official-release', action="store",
                     default="false",
                     help="Whether this release is official")
    opts = cmd.parse_args(args)
    opts.is_official_release = common.str2bool(opts.is_official_release)
    return opts
def parse_command_line(args):
    """Parse script arguments.

    :return: Parsed args for assignment; ``is_official_release`` ends up
        as a real boolean
    """
    # Declarative option table; definition order preserved for --help.
    option_table = (
        (("--manifest",),
         dict(required=True, action="store",
              help="the path of manifest file")),
        (("--builddir",),
         dict(required=True, action="store",
              help="destination for checked out repositories")),
        (("--force",),
         dict(action="store_true",
              help="use destination dir, even if it exists")),
        (("--git-credential",),
         dict(required=True, action="append",
              help="Git credentials for CI services")),
        (("--jobs",),
         dict(default=-1, type=int, action="store",
              help="Number of build jobs to run in parallel")),
        (("--is-official-release",),
         dict(default="false", action="store",
              help="Whether this release is official")),
    )
    parser = argparse.ArgumentParser()
    for names, kwargs in option_table:
        parser.add_argument(*names, **kwargs)
    parsed_args = parser.parse_args(args)
    parsed_args.is_official_release = common.str2bool(
        parsed_args.is_official_release)
    return parsed_args
def test_str2bool_false(self):
    """str2bool returns False for the literal 'False' and for non-strings."""
    falsy_inputs = ("False", type("Whatever", (), {}))
    for value in falsy_inputs:
        self.assertFalse(str2bool(value))
# Remaining command-line options (parser is created earlier, outside this
# span, presumably with add_help=False since -h is added manually —
# TODO confirm).
parser.add_argument("-h", "--help", action="help", help="show this help message and exit")
parser.add_argument("-v", "--verbose", metavar="on/off", help="enable/disable information messages on screen")
parser.add_argument("-g", "--logging", metavar="on/off", help="enable/disable logging on file")
parser.add_argument("-p", "--loggingPath", metavar="path", help="define path of logging file")
parser.add_argument("-m", "--loggingFileMode", choices=["overwrite", "append"], help="define the mode in which the logging file has to be opened")
args = parser.parse_args()

# Add directory of the configuration file to sys.path before import crawler, so that the module can easily
# be overrided by placing the modified file in a subfolder, along with the configuration file itself
configFileDir = os.path.dirname(os.path.abspath(args.configFilePath))
sys.path = [configFileDir] + sys.path
import crawler

# Load configurations; command-line echo options override the config file.
config = common.loadConfig(args.configFilePath)
if (args.verbose is not None): config["global"]["echo"]["mandatory"]["verbose"] = common.str2bool(args.verbose)
if (args.logging is not None): config["global"]["echo"]["mandatory"]["logging"] = common.str2bool(args.logging)
if (args.loggingPath is not None): config["global"]["echo"]["mandatory"]["loggingpath"] = args.loggingPath
if (args.loggingFileMode is not None): config["global"]["echo"]["mandatory"]["loggingfilemode"] = args.loggingFileMode

# Connect to server and register this process as a client; the server
# either refuses (exit with its reason) or assigns a client id.
processID = os.getpid()
server = common.NetworkHandler()
server.connect(config["global"]["connection"]["address"], config["global"]["connection"]["port"])
server.send({"command": "CONNECT", "type": "client", "processid": processID})
message = server.recv()
if (message["command"] == "REFUSED"): sys.exit("ERROR: %s" % message["reason"])
else: clientID = message["clientid"]

# Configure echoing; log file name embeds client id, hostname and port.
echo = common.EchoHandler(config["client"]["echo"], "client%s@%s[%s].log" % (clientID, socket.gethostname(), config["global"]["connection"]["port"]))
def expand_config(self, config):
    """Expand a raw promoter config dict with defaults and derived values.

    Fills in credentials, distro/release naming, URLs, namespaces,
    container-list settings, log configuration, boolean feature flags,
    promotion criteria (as sets) and the qcow server entry.

    :param config: partially-populated configuration mapping
    :return: the same mapping, expanded in place
    """
    # Mangling, diverging and derivatives
    config['dlrnauth_username'] = \
        config.pop('username', self.defaults['dlrnauth_username'])
    # Password only ever comes from the environment, never from the file.
    config['dlrnauth_password'] = os.environ.get('DLRNAPI_PASSWORD', None)
    config['distro_name'] = \
        config.get('distro_name', self.defaults['distro_name']).lower()
    config['distro_version'] = \
        config.get('distro_version', self.defaults['distro_version']).lower()
    config['release'] = \
        config.get('release', self.defaults['release']).lower()
    config['build_method'] = \
        config.get('build_method', self.defaults['build_method']).lower()
    config['container_preffix'] = \
        config.get('container_preffix',
                   self.defaults['container_preffix']).lower()
    config['distro'] = "{}{}".format(config['distro_name'],
                                     config['distro_version'])
    config['latest_hashes_count'] = \
        int(config.get('latest_hashes_count',
                       self.defaults['latest_hashes_count']))
    if 'repo_url' not in config:
        config['repo_url'] = ("https://{}/{}-{}"
                              "".format(self.defaults['dlrn_api_host'],
                                        config['distro'],
                                        config['release']))
    if 'api_url' not in config:
        config['api_url'] = self.get_dlrn_api_url(config)
    if 'log_file' not in config:
        config['log_file'] = ("~/promoter_logs/{}_{}.log"
                              "".format(config['distro'],
                                        config['release']))
    # ussuri is the only release with the special "tripleou" namespace.
    if 'source_namespace' not in config:
        if config['release'] == "ussuri":
            source_namespace = "tripleou"
        else:
            source_namespace = "tripleo{}".format(config['release'])
        config['source_namespace'] = source_namespace
    if 'target_namespace' not in config:
        if config['release'] == "ussuri":
            target_namespace = "tripleou"
        else:
            target_namespace = "tripleo{}".format(config['release'])
        config['target_namespace'] = target_namespace
    config['containers_list_base_url'] = \
        config.get('containers_list_base_url',
                   self.defaults['containers_list_base_url'])
    config['containers_list_path'] = \
        config.get('containers_list_path',
                   self.defaults['containers_list_path'])
    config['containers_list_exclude_config'] = \
        config.get('containers_list_exclude_config',
                   self.defaults['containers_list_exclude_config'])
    config['log_file'] = os.path.expanduser(config['log_file'])
    config['log_level'] = \
        config.get('log_level', self.defaults['log_level'])
    try:
        # Resolve the level name (e.g. "INFO") to its numeric constant.
        config['log_level'] = getattr(logging, config['log_level'])
    except AttributeError:
        # FIX: self.defaults is subscripted everywhere else in this
        # method, so the previous attribute access
        # (self.defaults.log_level) raised AttributeError inside the
        # handler.  Also resolve the default name to its numeric level,
        # matching the success path above.
        self.log.error("unrecognized log level: %s, using default %s",
                       config['log_level'], self.defaults['log_level'])
        config['log_level'] = getattr(logging, self.defaults['log_level'])
    config['allowed_clients'] = \
        config.get('allowed_clients',
                   self.defaults['allowed_clients']).split(',')
    config['dry_run'] = \
        str2bool(config.get('dry_run', self.defaults['dry_run']))
    config['manifest_push'] = \
        str2bool(config.get('manifest_push',
                            self.defaults['manifest_push']))
    config['target_registries_push'] = \
        str2bool(config.get('target_registries_push',
                            self.defaults['target_registries_push']))
    # Promotion criteria do not have defaults
    for target_name, job_list in config['promotion_criteria_map'].items():
        config['promotion_criteria_map'][target_name] = set(job_list)
    config['qcow_server'] = config['overcloud_images']['qcow_servers'][
        config['default_qcow_server']]
    return config
def _extractConfig(self, configurationsDictionary):
    """Extract this handler's settings after base-class extraction.

    :raises KeyError: if the mandatory 'selectcachesize' is absent.
        'onduplicateupdate' is optional and defaults to False.
    """
    BasePersistenceHandler._extractConfig(self, configurationsDictionary)
    try:
        raw_cache_size = self.config["selectcachesize"]
    except KeyError:
        raise KeyError("Parameter 'selectcachesize' must be specified.")
    self.config["selectcachesize"] = int(raw_cache_size)
    if "onduplicateupdate" in self.config:
        self.config["onduplicateupdate"] = common.str2bool(
            self.config["onduplicateupdate"])
    else:
        self.config["onduplicateupdate"] = False
def test_str2bool_true(self):
    """str2bool accepts all common truthy string spellings."""
    for truthy in ("yes", "true", "True", "on", "1"):
        self.assertTrue(str2bool(truthy))
# Final command-line option (parser is created earlier, outside this span).
parser.add_argument(
    "-m", "--loggingFileMode", choices=["overwrite", "append"],
    help="define the mode in which the logging file has to be opened")
args = parser.parse_args()

# Add directory of the configuration file to sys.path before import serverlib, so that persistence and filter modules
# can easily be overrided by placing the modified files in a subfolder, along with the configuration file itself
configFileDir = os.path.dirname(os.path.abspath(args.configFilePath))
sys.path = [configFileDir] + sys.path
import serverlib

# Load configurations; explicit command-line options override file values.
config = common.loadConfig(args.configFilePath)
if (args.verbose is not None):
    config["global"]["echo"]["mandatory"]["verbose"] = common.str2bool(
        args.verbose)
if (args.logging is not None):
    config["global"]["echo"]["mandatory"]["logging"] = common.str2bool(
        args.logging)
if (args.loggingPath is not None):
    config["global"]["echo"]["mandatory"]["loggingpath"] = args.loggingPath
if (args.loggingFileMode is not None):
    config["global"]["echo"]["mandatory"][
        "loggingfilemode"] = args.loggingFileMode

# Run server (blocks until shutdown).
server = serverlib.ThreadedTCPServer(config)
server.run()