def main(params):
    """Evaluate a trained model on the test split.

    Loads the experiment config, builds the test-split data loader and
    the network, restores weights from the given checkpoint, and runs a
    single evaluation pass.

    Args:
        params: dict of CLI parameters; uses "config", "dataset",
            "num_workers", "method", and "checkpoint" keys.
    """
    config = io_utils.load_yaml(params["config"])

    # Prepare the dataset class and build a loader for the test split only.
    dset_cls = cmf.get_dataset(params["dataset"])
    dsets, loaders = cmf.get_loader(
        dset_cls,
        split=["test"],
        loader_configs=[config["test_loader"]],
        num_workers=params["num_workers"],
    )

    # Build the network without a logger and restore checkpointed weights.
    method_cls = cmf.get_method(params["method"])
    network = method_cls(config, logger=None)
    network.load_checkpoint(params["checkpoint"], True)
    if config["model"]["use_gpu"]:
        network.gpu_mode()

    # Run evaluation over the test loader (epoch index -1, no logger).
    cmf.test(config, loaders["test"], network, -1, None, mode="Test")
def main():
    """Entry point for training: parse CLI args, set up, then train."""
    # Get parameters from the command line.
    params = _get_argument_params()

    # Resolve the model and dataset modules. Kept as module-level globals
    # because other code in this module references them by these names.
    global M, dataset
    M = cmf.get_model(params["model_type"])
    dataset = cmf.get_dataset(params["dataset"])

    # Load the YAML configuration, let the model override it with CLI
    # parameters, and prepare the save directories.
    cfg = M.override_config_from_params(
        io_utils.load_yaml(params["config_path"]), params)
    cmf.create_save_dirs(cfg["misc"])

    # Create the (global) logger before training starts.
    global logger
    logger = cmf.create_logger(cfg)

    # Train the network.
    train(cfg)
    # NOTE(review): this is the tail of _get_argument_params(); the parser
    # object is constructed earlier in the file, outside this chunk.
    parser.add_argument("--debug_mode", action="store_true", default=False,
                        help="Train the model in debug mode.")
    # Convert the parsed Namespace to a plain dict and echo it for the log.
    params = vars(parser.parse_args())
    print(json.dumps(params, indent=4))
    return params


if __name__ == "__main__":
    # load parameters
    params = _get_argument_params()
    # Model and dataset modules held as module-level names used below.
    global M, dataset
    M = cmf.get_model(params["model_type"])
    dataset = cmf.get_dataset(params["dataset"])
    # loading configuration and setting environment
    config = io_utils.load_yaml(params["config_path"])
    config = M.override_config_from_params(config, params)
    cmf.create_save_dirs(config["misc"])
    """ Build data loader """
    # Pick the loader configuration matching the requested mode.
    if params["mode"] == "train":
        dset = dataset.DataSet(config["train_loader"])
    else:
        dset = dataset.DataSet(config["test_loader"])
    # NOTE(review): batch_size is hard-coded to 64 and shuffle=False even
    # when mode == "train" — confirm this is intended.
    L = data.DataLoader(dset, batch_size=64, \
                        num_workers=config["misc"]["num_workers"], \
                        shuffle=False, collate_fn=dataset.collate_fn)
    # Let the model adjust its config based on the constructed dataset.
    config = M.override_config_from_loader(config, dset)