def __init__(self, module_path, hyper_params, use_cuda, test_rate=1.0,
             USE_EXIST_RES=False, mission=1):
    print("Test rate:", test_rate)
    _, self.dataset = get_dataset(valid_rate=test_rate, USE_TRANSFORM=False,
                                  mission=mission)
    print("test number:", len(self.dataset))
    self.hyper_params = hyper_params
    self.data_loader = DataLoader(
        dataset=self.dataset,
        num_workers=self.hyper_params["threads"],
        batch_size=self.hyper_params["batch_size"],
        shuffle=False)
    self.resnet = get_network(mission=mission)
    self.resnet.load_state_dict(torch.load(module_path))
    if use_cuda:
        self.resnet = self.resnet.cuda()
    self.v = Validator(resnet=self.resnet,
                       hyper_params=hyper_params,
                       use_cuda=use_cuda,
                       data_loader=self.data_loader)

def __init__(self, module_path, hyper_params, use_cuda, mission=1):
    self.dataset = get_test_set(mission=mission)
    print("test number:", len(self.dataset))
    self.hyper_params = hyper_params
    self.data_loader = DataLoader(
        dataset=self.dataset,
        num_workers=self.hyper_params["threads"],
        batch_size=self.hyper_params["batch_size"],
        shuffle=False)
    # Pass mission so the network architecture matches the checkpoint being loaded.
    self.resnet = get_network(mission=mission)
    self.resnet.load_state_dict(torch.load(module_path))
    if use_cuda:
        self.resnet = self.resnet.cuda()
    self.v = Validator(resnet=self.resnet,
                       hyper_params=hyper_params,
                       use_cuda=use_cuda,
                       data_loader=self.data_loader)

def __init__(self, model_path, save_dir, hyper_params, use_cuda, mission=1):
    self.dataset = get_test_set(mission=mission)
    self.hyper_params = hyper_params
    self.data_loader = DataLoader(
        dataset=self.dataset,
        num_workers=self.hyper_params["threads"],
        batch_size=self.hyper_params["batch_size"],
        shuffle=False)
    self.save_dir = save_dir
    self.model_path = model_path
    self.use_cuda = use_cuda
    self.mission = mission
    self.resnet = get_network(mission=mission)
    self.resnet.load_state_dict(torch.load(model_path))
    if use_cuda:
        self.resnet = self.resnet.cuda()

def setup(self, valid_rate=0.1, use_cuda=True, model_path="", use_exist_dataset=False, module_save_dir="", mission=1, criterion=None, hyper_params=None, FREEZE_PARAM=False, PRETRAINED=False): """setup the module""" self.train_dataset, self.valid_dataset = get_dataset( valid_rate, USE_TRANSFORM=True, mission=mission) self.hyper_params = hyper_params self.train_data_loader = DataLoader( dataset=self.train_dataset, num_workers=self.hyper_params["threads"], batch_size=self.hyper_params["batch_size"], shuffle=True) self.valid_data_loader = DataLoader( dataset=self.valid_dataset, num_workers=self.hyper_params["threads"], batch_size=self.hyper_params["batch_size"], shuffle=False) self.use_cuda = use_cuda self.resnet = get_network(mission) if PRETRAINED: self.resnet.load_state_dict(torch.load(model_path)) if use_cuda: self.resnet = self.resnet.cuda() if SHOW_NET: from torchsummary import summary batch_size = self.hyper_params["batch_size"] input_size = self.hyper_params["input_size"][0] summary(self.resnet, (3, input_size, input_size), batch_size) if (self.hyper_params["optimizer"] == "SGD"): self.optimizer = torch.optim.SGD( self.resnet.parameters(), lr=self.hyper_params["learning_rate"], momentum=0.99) elif (self.hyper_params["optimizer"] == "Adam"): self.optimizer = torch.optim.Adam( self.resnet.parameters(), lr=self.hyper_params["learning_rate"], ) self.StepLR = torch.optim.lr_scheduler.StepLR( self.optimizer, step_size=self.hyper_params["step_size"], gamma=self.hyper_params["lr_gamma"]) self.criterion = torch.nn.CrossEntropyLoss() self.module_save_dir = module_save_dir self.v = Validator(resnet=self.resnet, hyper_params=hyper_params, use_cuda=use_cuda, data_loader=self.valid_data_loader) pass