Example #1
0
def _post_process_model_config(model_config, config, should_make_dir):
    """Method to post process the model section of the config"""

    general_config = config.general

    if not model_config.save_dir:
        model_config.save_dir = os.path.join(general_config.base_path,
                                             "models", general_config.id)
    elif model_config.save_dir[0] != "/":
        model_config.save_dir = os.path.join(general_config.base_path,
                                             model_config.save_dir)

    if should_make_dir:
        make_dir(path=model_config.save_dir)

    # model_config.load_path = os.path.join(general_config.base_path,
    #                                      "model", model_config.load_path)

    if not model_config.load_dir:
        model_config.load_dir = os.path.join(general_config.base_path,
                                             "models")
    elif model_config.save_dir[0] != "/":
        model_config.load_dir = os.path.join(general_config.base_path,
                                             "models", model_config.load_dir)

    for key in ["learning_rate", "eps"]:
        model_config.optim[key] = float(model_config.optim[key])

    return model_config
def _post_process_expert_policy_config(expert_policy_config, general_config,
                                       should_make_dir):
    """Method to post process the expert_policy section of the config.

    Fills in default hyper-parameters and resolves ``save_dir`` to an
    absolute path under ``general_config.base_path``.

    Arguments:
        expert_policy_config -- the ``expert_policy`` section of the config
        general_config -- the ``general`` section; ``base_path`` is read
        should_make_dir -- when True, create ``save_dir`` on disk

    Returns:
        the post-processed ``expert_policy_config``
    """
    default_params = {
        "name": "ppo",
        "num_timesteps": 1000000,
        "save_dir": "",
        "hidden_size": 64,
        "num_layers": 2,
        "num_cpu": 1,
        "should_convert_model": True
    }
    expert_policy_config = _copy_from_default_dict(expert_policy_config,
                                                   default_params)

    if expert_policy_config.save_dir == "":
        # Default location: <base_path>/model/expert_policy
        expert_policy_config.save_dir = os.path.join(general_config.base_path,
                                                     "model", "expert_policy")
    elif expert_policy_config.save_dir[0] != "/":
        # Bug fix: previously both branches assigned the same default path,
        # silently discarding a user-supplied relative save_dir. Anchor the
        # relative path at the base path instead, mirroring the other
        # *_config helpers in this module.
        expert_policy_config.save_dir = os.path.join(
            general_config.base_path, expert_policy_config.save_dir)

    if should_make_dir:
        make_dir(path=expert_policy_config.save_dir)

    return expert_policy_config
Example #3
0
def _post_process_plot_config(plot_config, general_config):
    # Method to post process the plot section of the config
    if ("base_path" not in plot_config) or (plot_config.base_path == ""):
        plot_config.base_path = os.path.join(general_config.base_path,
                                             "plots", general_config.id)
        make_dir(path=plot_config.base_path)

    return plot_config
Example #4
0
def _post_process_plot_config(plot_config, general_config, should_make_dir):
    """Method to post process the plot section of the config"""
    if not plot_config.base_path:
        plot_config.base_path = os.path.join(general_config.base_path, "plots",
                                             general_config.id)
        if should_make_dir:
            make_dir(path=plot_config.base_path)

    return plot_config
def _post_process_log_config(log_config, general_config, should_make_dir):
    # Method to post process the log section of the config

    if ("file_path" not in log_config) or (log_config.file_path == ""):
        log_config.file_path = os.path.join(general_config.base_path, "logs",
                                            general_config.id)
        if (should_make_dir):
            make_dir(path=log_config.file_path)
        log_config.file_path = os.path.join(log_config.file_path, "log.txt")

    log_config.dir = log_config.file_path.rsplit("/", 1)[0]
    return log_config
Example #6
0
def _post_process_model_config(model_config, config):
    """Post process the net section of the config.

    Fills in defaults, resolves save/load paths against the base path,
    and post-processes the nested embedding/optimiser/early-stopping/
    decoder sub-sections.
    """

    general = config.general
    dataset = config.dataset

    default_params = {
        "name": "baseline1",
        "batch_size": 100,
        "num_epochs": 1000,
        "persist_per_epoch": -1,
        "early_stopping_patience": 1,
        "save_dir": "",
        "should_load_model": False,
        "dropout_probability": 0.0,
        "embedding": {},
        "optimiser": {},
        "early_stopping": {}
    }

    # Fill in any keys missing from the user-supplied config.
    for key, default in default_params.items():
        if key not in model_config:
            model_config[key] = default

    if model_config.save_dir == "":
        # No directory given: derive one from the experiment id.
        model_config.save_dir = os.path.join(general.base_path, "net",
                                             general.id)
    elif not model_config.save_dir.startswith("/"):
        # Relative directory: anchor it at the base path.
        model_config.save_dir = os.path.join(general.base_path,
                                             model_config.save_dir)

    make_dir(path=model_config.save_dir)

    model_config.load_path = os.path.join(general.base_path, "net",
                                          general.id)

    model_config["should_load_model"] = _get_boolean_value(
        model_config["should_load_model"])

    # Post-process nested sub-configs; deepcopy guards against the helpers
    # mutating the originals.
    model_config.early_stopping = _post_process_early_stooping_config(
        model_config.early_stopping)
    model_config.embedding = _post_process_embedding_config(
        deepcopy(model_config.embedding), general, dataset)
    model_config.optimiser = _post_process_optimiser_config(
        deepcopy(model_config.optimiser))
    model_config.decoder = _post_process_decoder_config(
        deepcopy(model_config.decoder), model_config.encoder)

    return model_config
def _post_process_model_config(model_config, config, should_make_dir):
    """Post process the model section of the config.

    Fills in defaults, resolves save/load paths against the base path,
    and post-processes the optimizer/imagination-model/expert-policy
    sub-sections.
    """

    general = config.general

    defaults = {
        "name": "baseline1",
        "should_train": False,
        "num_epochs": 1000,
        "persist_per_epoch": -1,
        "persist_best_model": False,
        "early_stopping_patience": 1,
        "save_dir": "",
        "should_load_model": False,
        "optimizer": Dict(),
        "imagination_model": Dict(),
        "expert_policy": Dict(),
        "modes": ["train", "val", "test"],
    }

    # Fill in any keys missing from the user-supplied config.
    for key, value in defaults.items():
        if key not in model_config:
            model_config[key] = value

    if model_config.save_dir == "":
        # No directory given: derive one from the experiment id.
        model_config.save_dir = os.path.join(general.base_path, "model",
                                             general.id)
    elif not model_config.save_dir.startswith("/"):
        # Relative directory: anchor it at the base path.
        model_config.save_dir = os.path.join(general.base_path,
                                             model_config.save_dir)

    if should_make_dir:
        make_dir(path=model_config.save_dir)

    # NOTE(review): assumes model_config.load_path is already present in the
    # incoming config (it is not in the defaults above) — confirm callers.
    model_config.load_path = os.path.join(general.base_path, "model",
                                          model_config.load_path)

    model_config["should_load_model"] = _get_boolean_value(
        model_config["should_load_model"])

    # Post-process nested sub-configs; deepcopy guards against the helpers
    # mutating the originals.
    model_config.optimizer = _post_process_optimizer_config(
        deepcopy(model_config.optimizer))
    model_config.imagination_model = _post_process_imagination_model_config(
        deepcopy(model_config.imagination_model))
    model_config.expert_policy = _post_process_expert_policy_config(
        deepcopy(model_config.expert_policy), general, should_make_dir)

    return model_config
def make_remote_config(sample_config, app_id):
    """Build a config for a remote run by re-rooting the log/plot paths.

    Returns a deep copy of ``sample_config`` whose general id is
    ``app_id`` and whose log file / plot directory are derived from it.
    """
    config = deepcopy(sample_config)
    config.general.id = app_id

    # Log params: <base_path>/logs/<app_id>/log.txt
    log_dir = os.path.join(config.general.base_path, "logs", app_id)
    config.log["file_path"] = os.path.join(log_dir, "log.txt")

    # Plot params: <base_path>/plots/<app_id>
    config.plot.base_path = os.path.join(config.general.base_path, "plots",
                                         app_id)
    make_dir(config.plot.base_path)

    return config
Example #9
0
 def save(
     self,
     save_dir,
     epoch: Optional[int] = None,
     composition_fn: Optional[nn.Module] = None,
     representation_fn: Optional[nn.Module] = None,
     optimizers: Optional[Iterable[torch.optim.Optimizer]] = None,
 ) -> None:
     """Persist the model weights in disk

     Arguments:
         save_dir {str} -- save location

     Keyword Arguments:
         epoch {Optional[int]} -- epoch to save under; ``None`` maps to 0
             (default: {None})
         composition_fn {Optional[nn.Module]} -- Graph Function; must be
             supplied despite the ``None`` default (default: {None})
         representation_fn {Optional[nn.Module]} -- previously signature
             function; must be supplied despite the ``None`` default
             (default: {None})
         optimizers {Optional[Iterable[torch.optim.Optimizer]]} -- optimizers
             whose state dicts are persisted alongside the weights
             (default: {None})
     """
     if epoch is None:
         epoch = 0
     # Bug fix: the default optimizers=None used to crash the state-dict
     # comprehension below with a TypeError; treat None as "no optimizers".
     if optimizers is None:
         optimizers = ()
     # Weights go into a per-epoch subdirectory: <save_dir>/<epoch>/model.pt
     path_to_save_model_at = os.path.join(save_dir, str(epoch))
     make_dir(path_to_save_model_at)
     data = {
         "composition_fn": {
             "weights": composition_fn.weights,
             "weight_names": composition_fn.weight_names,
         },
         "representation_fn": {
             "weights": representation_fn.weights,
             "weight_names": representation_fn.weight_names,
         },
         "optimizers": [opt.state_dict() for opt in optimizers],
     }
     torch.save(data, os.path.join(path_to_save_model_at, "model.pt"))
     # Metadata (currently just the epoch) lives at the top level:
     # <save_dir>/metadata.pt
     data = {"epoch": epoch}
     path_to_save_metadata_at = save_dir
     make_dir(path_to_save_metadata_at)
     torch.save(data, os.path.join(path_to_save_metadata_at, "metadata.pt"))
def _post_process_dataset_config(dataset_config, general_config,
                                 should_make_dir):
    """Post process the dataset section of the config.

    Resolves the dataset base path and name, optionally creates the
    directories, fills in defaults, and post-processes the split and
    dataset-generation sub-sections.
    """
    if "base_path" not in dataset_config or dataset_config.base_path == "":
        # Default data root: <base_path>/data
        dataset_config.base_path = os.path.join(general_config.base_path,
                                                "data")

    if should_make_dir:
        make_dir(dataset_config.base_path)

    if "name" not in dataset_config or dataset_config.name == "":
        dataset_config.name = "HalfCheetah-v2"

    if should_make_dir:
        make_dir(os.path.join(dataset_config.base_path, dataset_config.name))

    # Fill in any keys missing from the user-supplied config.
    dataset_config = _copy_from_default_dict(
        dataset_config,
        {
            "should_generate": False,
            "num_trajectories": 10000,
            "num_actions_per_trajectory": 1,
            "batch_size": 100,
            "buffer_size": 100,
            "num_workers": 2,
            "should_load": True,
            "sequence_length": 250,
            "imagination_length": 10,
        })

    # Normalise string-ish flags (e.g. "True"/"false") to real booleans.
    if "should_generate" in dataset_config:
        dataset_config["should_generate"] = _get_boolean_value(
            dataset_config["should_generate"])

    # Post-process nested sub-configs; deepcopy guards against the helpers
    # mutating the originals.
    dataset_config.split = _post_process_split_config(
        deepcopy(dataset_config.split))
    dataset_config.dataset_generation = _post_process_dataset_generation_config(
        deepcopy(dataset_config.dataset_generation))

    return dataset_config
Example #11
0
def _post_process_log_config(log_config, general_config):
    # Method to post process the log section of the config

    if ("file_path" not in log_config) or (log_config.file_path == ""):
        log_config.file_path = os.path.join(general_config.base_path,
                                            "logs", general_config.id)
        make_dir(path=log_config.file_path)
        log_config.file_path = os.path.join(log_config.file_path, "log.txt")

    log_config.dir = log_config.file_path.rsplit("/", 1)[0]

    key = "mongo_host"
    if (key not in log_config or log_config[key] == ""):
        log_config[key] = "127.0.0.1"

    key = "mongo_port"
    if (key not in log_config or log_config[key] == ""):
        log_config[key] = "8092"

    key = "mongo_db"
    if (key not in log_config or log_config[key] == ""):
        log_config[key] = "graphsum"

    return log_config
Example #12
0
def _post_process_logger_file_config(logger_file_config, general_config,
                                     should_make_dir):
    """Method to post process the file subsection of the logger section of the config"""

    if not logger_file_config.path:
        logger_file_config.path = os.path.join(general_config.base_path,
                                               "logs", general_config.id)
        if should_make_dir:
            make_dir(path=logger_file_config.path)
            make_dir(os.path.join(logger_file_config.path, "train"))
            make_dir(os.path.join(logger_file_config.path, "eval"))
        logger_file_config.path = os.path.join(logger_file_config.path,
                                               "log.txt")

    logger_file_config.dir = logger_file_config.path.rsplit("/", 1)[0]
    return logger_file_config
Example #13
0
def get_config_from_log(log):
    """Method to prepare the config for all downstream tasks"""
    config = get_config_box(log)
    # Repo root = everything before the "/codes" segment of this file's dir.
    current_dir = os.path.dirname(os.path.realpath(__file__))
    config.general.base_path = current_dir.split("/codes")[0]

    log_dir = os.path.join(config.general.base_path, "logs",
                           config.general.id)
    make_dir(path=log_dir)
    for mode in ("train", "eval"):
        make_dir(os.path.join(log_dir, mode))

    config.logger.file.path = os.path.join(log_dir, "log.txt")
    # Directory portion of the path (everything before the last "/").
    config.logger.file.dir = config.logger.file.path.rsplit("/", 1)[0]

    return config