weights_init(model)

#------------------------------------------------------#
#   For the pretrained weight file, see the README
#   (Baidu Netdisk download).
#------------------------------------------------------#
model_path = 'model_data/voc_weights_resnet.pth'
print('Loading weights into state dict...')
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model_dict = model.state_dict()                                 # OrderedDict, 328 entries
pretrained_dict = torch.load(model_path, map_location=device)   # OrderedDict, 328 entries
#   Keep only the pretrained weights whose shapes match the model's own parameters.
pretrained_dict = {k: v for k, v in pretrained_dict.items() if np.shape(model_dict[k]) == np.shape(v)}
model_dict.update(pretrained_dict)  # merge the matching pretrained parameters into the state dict
model.load_state_dict(model_dict)
print('Finished!')

net = model.train()  # put the model in training mode
if Cuda:
    net = torch.nn.DataParallel(model)
    cudnn.benchmark = True
    net = net.cuda()

loss_history = LossHistory("logs/")
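weights_init is called at the top of this block but not defined in this excerpt. As a minimal sketch of what such a helper typically does in training scripts of this kind, the hypothetical version below applies a normal initialization to convolutional layers and the usual (1, 0) initialization to batch-norm layers; the function name matches the call above, but the init_gain default and the exact layer handling are assumptions, not taken from the source.

# Hypothetical sketch of weights_init; the real implementation may differ.
import torch

def weights_init(net, init_gain=0.02):  # init_gain=0.02 is an assumed default
    def init_func(m):
        classname = m.__class__.__name__
        # Convolution weights drawn from N(0, init_gain).
        if hasattr(m, 'weight') and classname.find('Conv') != -1:
            torch.nn.init.normal_(m.weight.data, 0.0, init_gain)
        # BatchNorm layers: weight ~ N(1, 0.02), bias set to zero.
        elif classname.find('BatchNorm2d') != -1:
            torch.nn.init.normal_(m.weight.data, 1.0, 0.02)
            torch.nn.init.constant_(m.bias.data, 0.0)
    net.apply(init_func)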
annotation_path = '2007_train.txt'
#----------------------------------------------------------------------#
#   The validation split is done here in train.py.
#   It is normal for 2007_test.txt and 2007_val.txt to be empty;
#   they are not used during training.
#   With the current split, the validation:training ratio is 1:9.
#----------------------------------------------------------------------#
val_split = 0.1
with open(annotation_path) as f:
    lines = f.readlines()
np.random.seed(10101)
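The excerpt ends right after seeding NumPy's RNG. For context, a plausible continuation of the split (an assumption inferred from val_split = 0.1 and the 1:9 ratio stated in the comment, not shown in this excerpt) would shuffle the annotation lines reproducibly and then carve off 10% for validation:

# Assumed continuation: shuffle reproducibly, then split 1:9 into val/train.
np.random.shuffle(lines)    # order is fixed by the seed set above
np.random.seed(None)        # restore nondeterministic seeding afterwards
num_val = int(len(lines) * val_split)   # 10% of the annotations for validation
num_train = len(lines) - num_val        # remaining 90% for training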