def build_config(config_path: (str, list), log_dir: str, ckpt_path: str) -> [dict, str]:
    """
    Initialise the log directory, validate the checkpoint path and parse the
    configuration for training.

    :param config_path: str or list of str, path(s) to config file(s)
    :param log_dir: str, path to where training logs are to be stored
    :param ckpt_path: str, path where the model checkpoint is stored;
        may be empty when training from scratch
    :return:
        - config: a dictionary holding the parsed configuration
        - log_dir: the path of the directory to save logs
    :raises ValueError: if ckpt_path is non-empty and does not end with ".ckpt"
    """
    # init log directory
    log_dir = build_log_dir(log_dir)

    # check checkpoint path; report the offending value to ease debugging
    if ckpt_path != "" and not ckpt_path.endswith(".ckpt"):
        raise ValueError(
            "checkpoint path should end with .ckpt, got {}".format(ckpt_path))

    # load and backup config
    config = config_parser.load_configs(config_path)
    config_parser.save(config=config, out_dir=log_dir)
    return config, log_dir
def build_config(config_path: (str, list), log_dir: str, ckpt_path: str) -> [dict, str]:
    """
    Create a new directory under the log directory to store results and load
    the configuration.

    :param config_path: string or list of strings, path(s) of configuration
        files; an empty string selects the default config saved next to the
        checkpoint
    :param log_dir: string, path to store logs
    :param ckpt_path: str, path where the model checkpoint is stored
    :return:
        - config, configuration dictionary
        - log_dir, path of the directory for saving outputs
    :raises ValueError: if ckpt_path does not end with ".ckpt"
    """
    # check ckpt_path
    if not ckpt_path.endswith(".ckpt"):
        raise ValueError(
            "checkpoint path should end with .ckpt, got {}".format(ckpt_path))

    # init log directory
    log_dir = build_log_dir(log_dir)

    # load config
    if config_path == "":
        # use default config, which should be provided in the log folder
        # two levels above the checkpoint file
        config = config_parser.load_configs(
            "/".join(ckpt_path.split("/")[:-2]) + "/config.yaml")
    else:
        # use customized config
        # (a space is required between the concatenated fragments,
        # otherwise the log reads "configuration.The code")
        logging.warning(
            "Using customized configuration. "
            "The code might break if the config of the model "
            "doesn't match the saved model."
        )
        config = config_parser.load_configs(config_path)
    return config, log_dir
def build_config(config_path: Union[str, List[str]],
                 log_dir: str,
                 exp_name: str,
                 ckpt_path: str) -> Tuple[Dict, str, str]:
    """
    Create a new directory under the log directory to store results and load
    the configuration.

    :param config_path: path(s) of configuration files; an empty string
        selects the default config saved next to the checkpoint
    :param log_dir: path of the log directory
    :param exp_name: experiment name
    :param ckpt_path: path where the model checkpoint is stored
    :return:
        - config, configuration dictionary
        - log_dir, path of the directory for saving outputs
        - ckpt_path, checkpoint path with the user home directory expanded
    """
    # init log directory
    log_dir = build_log_dir(log_dir=log_dir, exp_name=exp_name)

    # replace the ~ with user home path
    ckpt_path = os.path.expanduser(ckpt_path)

    # load config
    if config_path == "":
        # use default config, which should be provided in the log folder
        # two levels above the checkpoint file
        config = config_parser.load_configs(
            "/".join(ckpt_path.split("/")[:-2]) + "/config.yaml")
    else:
        # use customized config
        # (a space is required between the concatenated fragments,
        # otherwise the log reads "configuration.The code")
        logging.warning(
            "Using customized configuration. "
            "The code might break if the config doesn't match the saved model."
        )
        config = config_parser.load_configs(config_path)
    return config, log_dir, ckpt_path
def build_config(config_path: (str, list), log_root: str, log_dir: str, ckpt_path: str) -> [dict, str, str]:
    """
    Create a new directory under the log directory to store results and load
    the configuration.

    :param config_path: string or list of strings, path(s) of configuration
        files; an empty string selects the default config saved next to the
        checkpoint
    :param log_root: str, root of logs
    :param log_dir: string, path to store logs
    :param ckpt_path: str, path where the model checkpoint is stored
    :return:
        - config, configuration dictionary
        - log_dir, path of the directory for saving outputs
        - ckpt_path, checkpoint path with the user home directory expanded
    """
    # init log directory
    log_dir = build_log_dir(log_root=log_root, log_dir=log_dir)

    # replace the ~ with user home path
    ckpt_path = os.path.expanduser(ckpt_path)

    # load config
    if config_path == "":
        # use default config, which should be provided in the log folder
        # two levels above the checkpoint file
        config = config_parser.load_configs(
            "/".join(ckpt_path.split("/")[:-2]) + "/config.yaml")
    else:
        # use customized config
        # (a space is required between the concatenated fragments,
        # otherwise the log reads "configuration.The code")
        logging.warning(
            "Using customized configuration. "
            "The code might break if the config doesn't match the saved model."
        )
        config = config_parser.load_configs(config_path)
    return config, log_dir, ckpt_path
def test_build_log_dir():
    """
    Test build_log_dir with the default timestamp-based directory and with a
    custom directory name.
    """
    # default: an empty name yields a timestamp-named folder under "logs"
    default_dir = build_log_dir(log_dir="")
    parent, leaf = os.path.split(default_dir)
    assert parent == "logs"
    assert re.match("[0-9]{8}-[0-9]{6}", leaf)

    # custom: the given name is kept as the folder under "logs"
    custom_dir = build_log_dir(log_dir="custom")
    parent, leaf = os.path.split(custom_dir)
    assert parent == "logs"
    assert leaf == "custom"
def test_build_log_dir(log_dir: str, exp_name: str):
    """
    Test build_log_dir for both a custom experiment name and the default
    timestamp-based name.
    """
    built = build_log_dir(log_dir=log_dir, exp_name=exp_name)
    root, leaf = os.path.split(built)
    assert root == log_dir
    if exp_name:
        # a custom experiment name is used verbatim
        assert leaf == exp_name
    else:
        # an empty name falls back to a timestamp-based directory
        assert re.match("[0-9]{8}-[0-9]{6}", leaf)
def test_build_log_dir(log_root: str, log_dir: str):
    """
    Test build_log_dir for both a custom directory name and the default
    timestamp-based name.
    """
    built = build_log_dir(log_root=log_root, log_dir=log_dir)
    root, leaf = os.path.split(built)
    assert root == log_root
    if log_dir:
        # a custom directory name is used verbatim
        assert leaf == log_dir
    else:
        # an empty name falls back to a timestamp-based directory
        assert re.match("[0-9]{8}-[0-9]{6}", leaf)
def build_config(
    config_path: (str, list),
    log_root: str,
    log_dir: str,
    ckpt_path: str,
    max_epochs: int = -1,
) -> [dict, str]:
    """
    Initialise the log directory, validate the checkpoint path and parse the
    configuration for training.

    :param config_path: list of str, path to config file
    :param log_root: str, root of logs
    :param log_dir: str, path to where training logs are to be stored
    :param ckpt_path: str, path where the model checkpoint is stored;
        may be empty when training from scratch
    :param max_epochs: int, if max_epochs > 0, it overwrites the configured
        number of epochs (and caps save_period accordingly)
    :return:
        - config: a dictionary saving configuration
        - log_dir: the path of directory to save logs
    """
    # init log directory
    log_dir = build_log_dir(log_root=log_root, log_dir=log_dir)

    # a non-empty checkpoint path must point at a .ckpt file
    if ckpt_path != "" and not ckpt_path.endswith(".ckpt"):
        raise ValueError(
            f"checkpoint path should end with .ckpt, got {ckpt_path}")

    # load config
    config = config_parser.load_configs(config_path)

    # overwrite epochs and save_period if necessary
    if max_epochs > 0:
        config["train"]["epochs"] = max_epochs
        config["train"]["save_period"] = min(
            max_epochs, config["train"]["save_period"])

    # backup config
    config_parser.save(config=config, out_dir=log_dir)

    # batch_size in original config corresponds to batch_size per GPU
    num_gpus = len(tf.config.experimental.list_physical_devices("GPU"))
    config["train"]["preprocess"]["batch_size"] *= max(num_gpus, 1)

    return config, log_dir
def build_config(
    config_path: Union[str, List[str]],
    log_dir: str,
    exp_name: str,
    ckpt_path: str,
    max_epochs: int = -1,
) -> Tuple[Dict, str, str]:
    """
    Initialise the log directory and parse the configuration for training.

    Note: unlike earlier variants, this function performs no validation of
    ckpt_path's extension; it only expands the user home directory.

    :param config_path: list of str, path to config file
    :param log_dir: path of the log directory
    :param exp_name: name of the experiment
    :param ckpt_path: path where the model checkpoint is stored
    :param max_epochs: if max_epochs > 0, use it to overwrite the configured
        number of epochs (and cap save_period accordingly)
    :return:
        - config: a dictionary saving configuration
        - log_dir: the path of directory to save logs
        - ckpt_path: checkpoint path with the user home directory expanded
    """
    # init log directory
    log_dir = build_log_dir(log_dir=log_dir, exp_name=exp_name)

    # load config
    config = config_parser.load_configs(config_path)

    # replace the ~ with user home path
    ckpt_path = os.path.expanduser(ckpt_path)

    # overwrite epochs and save_period if necessary
    if max_epochs > 0:
        config["train"]["epochs"] = max_epochs
        config["train"]["save_period"] = min(
            max_epochs, config["train"]["save_period"])

    # backup config
    config_parser.save(config=config, out_dir=log_dir)

    # batch_size in original config corresponds to batch_size per GPU
    gpus = tf.config.experimental.list_physical_devices("GPU")
    config["train"]["preprocess"]["batch_size"] *= max(len(gpus), 1)

    return config, log_dir, ckpt_path