def xsnet1x34(pretrained_model=None, **kwargs):
    model = XSNet(Block, [4, 4, 8], [32 * 2, 64 * 2, 96 * 2], **kwargs)
    # model = XSNet(Block, [2, 4, 4], [32, 64, 96], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def Seg_Model(num_classes, criterion=None, pretrained_model=None, recurrence=0, **kwargs):
    model = ResNet(Bottleneck, [3, 4, 23, 3], num_classes, criterion, recurrence)

    if pretrained_model is not None:
        model = load_model(model, pretrained_model)

    return model

def Seg_Model(num_classes, criterion=None, pretrained_model=None):
    model = ResNet(Bottleneck, [3, 4, 23, 3], num_classes, criterion)

    if pretrained_model is not None:
        model = load_model(model, pretrained_model)

    return model

def iwxsnet18(pretrained_model=None, **kwargs):
    model = XSNet(Block, [2, 2, 4], [32, 64, 96], **kwargs)
    # model = XSNet(Block, [2, 4, 4], [32, 64, 96], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def set_ckpt(self, path):
    self.save_pth = path
    try:
        self.net = load_model(self.net, self.save_pth)
        self.signal_message.emit(int(2))
    except RuntimeError:
        self.signal_message.emit(int(1))

def set_net(self, net):
    self.net_num = net
    print(net)
    self.init_net()
    self.net = load_model(self.net, self.save_pth)
    self.net.eval()
    if self.is_cuda:
        self.net.cuda()

def run(self, model_path, model_indice, log_file, log_file_link):
    """There are four evaluation modes:
    1. only eval a .pth model: -e *.pth
    2. only eval a certain epoch: -e epoch
    3. eval all epochs in a given section: -e start_epoch-end_epoch
    4. eval all epochs from a certain started epoch: -e start_epoch-
    """
    if '.pth' in model_indice:
        models = [model_indice, ]
    elif "-" in model_indice:
        start_epoch = int(model_indice.split("-")[0])
        end_epoch = model_indice.split("-")[1]

        models = os.listdir(model_path)
        models.remove("epoch-last.pth")
        sorted_models = [None] * len(models)
        model_idx = [0] * len(models)

        for idx, m in enumerate(models):
            num = m.split(".")[0].split("-")[1]
            model_idx[idx] = num
            sorted_models[idx] = m
        model_idx = np.array([int(i) for i in model_idx])

        down_bound = model_idx >= start_epoch
        up_bound = [True] * len(sorted_models)
        if end_epoch:
            end_epoch = int(end_epoch)
            assert start_epoch < end_epoch
            up_bound = model_idx <= end_epoch
        bound = up_bound * down_bound
        model_slice = np.array(sorted_models)[bound]
        models = [os.path.join(model_path, model) for model in model_slice]
    else:
        models = [os.path.join(model_path, 'epoch-%s.pth' % model_indice), ]

    results = open(log_file, 'a')
    link_file(log_file, log_file_link)

    for model in models:
        logger.info("Load Model: %s" % model)
        self.val_func = load_model(self.network, model)
        # for name, parameters in self.val_func.named_parameters():
        #     print(name, ':', parameters.size())
        result_line = self.multi_process_evaluation()

        results.write('Model: ' + model + '\n')
        results.write(result_line)
        results.write('\n')
        results.flush()

    results.close()

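# Usage sketch (not from the original source): the four `model_indice` forms
# described in the docstring above would map to calls like the following.
# `evaluator` and all paths/epoch numbers here are illustrative assumptions.
#
#   evaluator.run('log/snapshot', 'log/snapshot/epoch-49.pth', 'eval.log', 'eval.link')  # mode 1: a single .pth file
#   evaluator.run('log/snapshot', '49', 'eval.log', 'eval.link')                          # mode 2: one epoch
#   evaluator.run('log/snapshot', '30-49', 'eval.log', 'eval.link')                       # mode 3: epochs 30..49
#   evaluator.run('log/snapshot', '30-', 'eval.log', 'eval.link')                         # mode 4: epoch 30 onwards
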
def xsnet18v2(pretrained_model=None, **kwargs):
    model = XSNet(Block, [2, 2, 4], [32, 64, 96],
                  context_embedding=ContextEmbeddingV2, **kwargs)
    # model = XSNet(Block, [2, 4, 4], [32, 64, 96], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def resnet152(pretrained_model=None, **kwargs):
    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model


# if __name__ == "__main__":
#     res = resnet101()
#     print(res)

def set_dataset(self, dataset):
    self.dataset = dataset
    if self.dataset == 0:
        self.color_map = self.color_map_fish
    elif self.dataset == 1:
        self.color_map = self.color_map_city
    self.init_net()
    self.net = load_model(self.net, self.save_pth)
    self.net.eval()
    if self.is_cuda:
        self.net.cuda()

def __init__(self, is_cuda):
    super(Inference, self).__init__()
    self.mode = 0
    self.video_path = 'city.avi'
    self.camera_num = 0
    self.source = 0
    self.stop = False
    self.is_cuda = is_cuda
    self.dataset = 0  # Fish-0 city-1
    self.net_num = 0
    self.num_classes = 18
    self.input_path = './res/1_Img8bit.png'
    self.label_path = './res/1_gtFine_labelTrainIds.png'
    self.gt_path = './res/1_gtFine_color.png'
    mean = (104.00698793, 116.66876762, 122.67891434)
    respth = './res'

    self.color_map_fish = [[128, 64, 128], [250, 170, 160], [250, 170, 30],
                           [220, 220, 0], [153, 153, 153], [180, 165, 180],
                           [243, 35, 232], [220, 20, 59], [254, 0, 0],
                           [0, 0, 142], [0, 0, 70], [1, 60, 100],
                           [0, 0, 230], [119, 12, 32], [70, 70, 70],
                           [107, 142, 35], [153, 251, 152], [70, 130, 180]]
    self.color_map_fish.append([0, 0, 0])
    self.color_map_fish = np.array(self.color_map_fish)

    with open('./cityscapes_info.json', 'r') as fr:
        labels_info = json.load(fr)
    self.lb_map = {el['id']: el['trainId'] for el in labels_info}
    color = {el['trainId']: el['color'] for el in labels_info}
    self.color_map_city = []
    for i in range(19):
        self.color_map_city.append(color[i])
    self.color_map_city.append([0, 0, 0])
    self.color_map_city = np.array(self.color_map_city)

    self.save_pth = osp.join(respth, 'FT.pth')
    self.init_net()
    self.net = load_model(self.net, self.save_pth)
    self.net.eval()
    if self.is_cuda:
        self.net.cuda()

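# Hedged sketch (not part of the original class): one way the `lb_map` and
# `color_map_city` built above could be applied to a raw Cityscapes label image.
# The helper name `colorize_city_label` and its use are assumptions only.
def colorize_city_label(label_ids, lb_map, color_map):
    # Map raw Cityscapes ids to trainIds; unknown ids and the 255 ignore label
    # fall back to the last, black entry of the color map.
    train_ids = np.vectorize(lambda i: lb_map.get(int(i), len(color_map) - 1))(label_ids)
    train_ids[train_ids == 255] = len(color_map) - 1
    # Look up the RGB color per pixel.
    return color_map[train_ids].astype(np.uint8)
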
def Seg_Model(num_classes, criterion=None, pretrained_model=None):
    model = ResNet(Bottleneck, [3, 4, 23, 3], num_classes, criterion)

    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
        # device = torch.device('cpu')
        # saved_state_dict = torch.load(pretrained_model, map_location=device)
        # new_params = model.state_dict().copy()
        # for i in saved_state_dict:
        #     # Scale.layer5.conv2d_list.3.weight
        #     i_parts = i.split('.')
        #     # print i_parts
        #     # if not i_parts[1] == 'layer5':
        #     if not i_parts[0] == 'fc':
        #         new_params['.'.join(i_parts[0:])] = saved_state_dict[i]
        # model.load_state_dict(new_params)

    return model

def restore_checkpoint(self):
    t_start = time.time()
    if self.distributed:
        tmp = torch.load(self.continue_state_object,
                         map_location=lambda storage, loc: storage.cuda(self.local_rank))
    else:
        tmp = torch.load(self.continue_state_object)
    t_ioend = time.time()

    self.state.model = load_model(self.state.model, tmp['model'], True)
    self.state.optimizer.load_state_dict(tmp['optimizer'])
    self.state.epoch = tmp['epoch'] + 1
    self.state.iteration = tmp['iteration']
    del tmp
    t_end = time.time()

    logger.info("Load checkpoint from file {}, "
                "Time usage:\n\tIO: {}, restore snapshot: {}".format(
                    self.continue_state_object, t_ioend - t_start, t_end - t_ioend))

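# Hedged counterpart sketch (assumed, not taken from the source): restore_checkpoint
# above reads the keys 'model', 'optimizer', 'epoch' and 'iteration', so the matching
# save side would write a dict of that shape, e.g.:
#
#   torch.save({'model': self.state.model.state_dict(),
#               'optimizer': self.state.optimizer.state_dict(),
#               'epoch': self.state.epoch,
#               'iteration': self.state.iteration},
#              self.continue_state_object)
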
def run(self, model_path, model_indice, log_file, log_file_link):
    """Evaluate models."""
    if '.pth' in model_indice:
        models = [model_indice, ]
    else:
        models = [os.path.join(model_path, 'epoch-%s.pth' % model_indice), ]

    results = open(log_file, 'a')
    link_file(log_file, log_file_link)

    for model in models:
        logger.info("Load Model: %s" % model)
        self.val_func = load_model(self.network, model)
        result_line = self.multi_process_evaluation()

        results.write('Model: ' + model + '\n')
        results.write(result_line)
        results.write('\n')
        results.flush()

    results.close()

def resnet34(pretrained_model=None, **kwargs):
    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def xception39(pretrained_model=None, **kwargs):
    model = Xception(Block, [4, 8, 4], [16, 32, 64], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def xsnet18_v1b(pretrained_model=None, **kwargs):
    model = XSNet(Block, [2, 3, 3], [32, 64, 96], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def main():
    """Create the model and start the evaluation process."""
    parser = get_parser()

    with Engine(custom_parser=parser) as engine:
        args = parser.parse_args()

        cudnn.benchmark = True

        h, w = map(int, args.input_size.split(','))
        if args.whole:
            input_size = (1024, 2048)
        else:
            input_size = (h, w)

        seg_model = eval('networks.' + args.model + '.Seg_Model')(
            num_classes=args.num_classes, recurrence=args.recurrence)

        load_model(seg_model, args.restore_from)

        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        seg_model.to(device)

        model = engine.data_parallel(seg_model)
        model.eval()

        dataset = CSDataSet(args.data_dir, args.data_list,
                            crop_size=(1024, 2048), mean=IMG_MEAN,
                            scale=False, mirror=False)
        test_loader, test_sampler = engine.get_test_loader(dataset)

        if engine.distributed:
            test_sampler.set_epoch(0)

        data_list = []
        confusion_matrix = np.zeros((args.num_classes, args.num_classes))
        palette = get_palette(256)

        save_path = os.path.join(os.path.dirname(args.restore_from), 'outputs')
        if not os.path.exists(save_path):
            os.makedirs(save_path)

        bar_format = '{desc}[{elapsed}<{remaining},{rate_fmt}]'
        pbar = tqdm(range(len(test_loader)), file=sys.stdout,
                    bar_format=bar_format)
        dataloader = iter(test_loader)

        for idx in pbar:
            # dataloader.next() is Python 2 only; use the builtin next() instead.
            image, label, size, name = next(dataloader)
            size = size[0].numpy()
            with torch.no_grad():
                output = predict_multiscale(model, image, input_size, [1.0],
                                            args.num_classes, False, 0)

            seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8)
            # np.int was removed from recent NumPy releases; use np.int64.
            seg_gt = np.asarray(label.numpy()[:, :size[0], :size[1]], dtype=np.int64)

            for i in range(image.size(0)):
                output_im = PILImage.fromarray(seg_pred[i])
                output_im.putpalette(palette)
                output_im.save(os.path.join(save_path, name[i] + '.png'))

            ignore_index = seg_gt != 255
            seg_gt = seg_gt[ignore_index]
            seg_pred = seg_pred[ignore_index]
            # show_all(gt, output)
            confusion_matrix += get_confusion_matrix(seg_gt, seg_pred,
                                                     args.num_classes)

            print_str = ' Iter{}/{}'.format(idx + 1, len(test_loader))
            pbar.set_description(print_str, refresh=False)

        confusion_matrix = torch.from_numpy(confusion_matrix).contiguous().cuda()
        confusion_matrix = engine.all_reduce_tensor(confusion_matrix,
                                                    norm=False).cpu().numpy()
        pos = confusion_matrix.sum(1)
        res = confusion_matrix.sum(0)
        tp = np.diag(confusion_matrix)

        IU_array = (tp / np.maximum(1.0, pos + res - tp))
        mean_IU = IU_array.mean()

        # getConfusionMatrixPlot(confusion_matrix)
        if engine.distributed and engine.local_rank == 0:
            print({'meanIU': mean_IU, 'IU_array': IU_array})
            model_path = os.path.dirname(args.restore_from)
            with open(os.path.join(model_path, 'result.txt'), 'w') as f:
                f.write(json.dumps({'meanIU': mean_IU,
                                    'IU_array': IU_array.tolist()}))

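# Hedged sketch of what get_confusion_matrix is assumed to compute in main() above:
# a num_classes x num_classes count matrix from flattened gt/pred label vectors,
# from which the tp / (pos + res - tp) formula yields the per-class IoU. The name
# and signature mirror the call site; the body is an assumption, not the project's
# actual implementation.
def _confusion_matrix_sketch(seg_gt, seg_pred, num_classes):
    # Encode each (gt, pred) pair as a single index, count occurrences,
    # and reshape the counts into rows = ground truth, columns = prediction.
    index = (seg_gt * num_classes + seg_pred).astype(np.int64)
    counts = np.bincount(index, minlength=num_classes * num_classes)
    return counts.reshape(num_classes, num_classes)
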
def resnet101_new(pretrained_model=None, **kwargs):
    model = ResNet_new(Bottleneck, [3, 4, 23, 3], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

def resnet152(pretrained_model=None, **kwargs):
    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model

from network import MCNet
from utils.pyt_utils import load_model
from tools.benchmark.compute_speed import compute_speed
from tools.benchmark.compute_flops import compute_flops
from tools.benchmark.flops_params_count import get_model_complexity_info

if __name__ == "__main__":
    network = MCNet(10, criterion=None, edge_criterion=None)
    model_file = "/home/xionghaitao/workplace/segmantic_segmentation/TorchSeg/log/scut.ernet.R101/snapshot/epoch-49.pth"
    model = load_model(network, model_file)
    model = model.cuda()
    model.eval()

    device = 0
    flops_count, params_count = get_model_complexity_info(model, (3, 576, 720))

def xception71(pretrained_model=None, **kwargs):
    model = Xception(**kwargs)
    if pretrained_model is not None:
        model = load_model(model, pretrained_model)
    return model