def val(args):
    """Evaluate a segmentation model over the 4 class groups, fusing 5 support shots.

    NOTE(review): formatting reconstructed from a whitespace-mangled source;
    every code token is unchanged.
    """
    model = get_model(args)
    model.eval()
    # Inference only: freeze all parameters.
    for p in model.parameters():
        p.requires_grad = False
    if not os.path.exists(args.snapshot_dir):
        os.mkdir(args.snapshot_dir)
    # if not os.path.exists(get_save_dir(args)):
    #     os.makedirs(get_save_dir(args))
    # 21-way confusion-matrix accumulator (presumably 20 classes + background).
    hist = np.zeros((21, 21))
    for group in range(4):
        args.group = group
        print("=" * 20 + "GROUP %d" % (args.group) + "=" * 20)
        # Load the checkpoint trained for this class group.
        restore(args, model, args.group)
        pbar = tqdm(total=args.max_steps)
        pbar.set_description('GROUP %d' % (args.group))
        train_loader = data_loader(args)
        count = 0
        for dat in train_loader:
            count += 1
            pbar.update(1)
            if count > args.max_steps:
                break
            que_img, que_mask, supp_img, supp_mask = dat
            que_img = que_img.cuda()
            # org_img = get_org_img(que_img.squeeze().cpu().data.numpy())
            # cv2.imwrite('query.png', org_img)
            cat_values = 0
            pred_sum = 0
            # Fuse the predictions produced from each of the 5 support shots.
            for i in range(5):
                pos_img = supp_img[i].cuda()
                pos_mask = supp_mask[i].cuda()
                # Binarize the support mask before feeding it to the model.
                pos_mask[pos_mask > 0.] = 1.
                pos_mask = torch.unsqueeze(pos_mask, dim=1)
                logits = model(que_img, pos_img, None, pos_mask)
                out_softmax, pred = model.get_pred(logits, que_img)
                pred_sum += pred
                if i == 0:
                    cat_values = out_softmax
                    # Zero channel 0 (presumably background) so torch.max
                    # below chooses among foreground channels only — TODO confirm.
                    # NOTE: this mutates out_softmax in place (same storage).
                    cat_values[0, :, :] = cat_values[0, :, :] * 0.
                else:
                    # Stack only channel 1 (presumably foreground) of the
                    # remaining shots onto the first shot's channels.
                    cat_values = torch.cat(
                        (cat_values, out_softmax[1, :, :].unsqueeze(dim=0)),
                        dim=0)
            # NOTE(review): local `val` shadows this function's name —
            # harmless here, but worth renaming.
            val, pred = torch.max(cat_values, dim=0)
            pred_sum[pred_sum > 0.] = 1.0
            # Offset the per-group channel index into the global class range.
            pred = pred + args.group * 5
            # Mask out pixels that no shot predicted as foreground.
            pred = pred_sum * pred
            tmp_pred = pred.cpu().data.numpy()
            hist += Metrics.fast_hist(
                tmp_pred.astype(np.int32),
                que_mask.squeeze().data.numpy().astype(np.int32), 21)
            # Save a colorized overlay of the prediction on the query image.
            org_img = get_org_img(que_img.squeeze().cpu().data.numpy())
            img = mask_to_img(tmp_pred, org_img)
            cv2.imwrite('save_bins/que_pred/query_%d.png' % (count), img)
            # org_img = get_org_img(pos_img.squeeze().cpu().data.numpy())
            # cv2.imwrite('supp_%d.png'%(i), org_img)
            #
            # np_pred = pred.cpu().data.numpy()
            # cv2.imwrite('%d.png'%(i), np_pred*255)
        # Running (cumulative) IoU after this group.
        miou = Metrics.get_voc_iou(hist)
        print('IOU:', miou)
        print("BMVC:", np.mean(miou[group * 5 + 1:(group + 1) * 5 + 1]))
        pbar.close()
    print("=" * 20 + "Overall" + "=" * 20)
    miou = Metrics.get_voc_iou(hist)
    print('IOU:', miou, np.mean(miou), np.mean(miou[1:]))
    # Collapse the 21-way confusion matrix to binary fg/bg and report its IoU.
    binary_hist = np.array((hist[0, 0], hist[0, 1:].sum(), hist[1:, 0].sum(),
                            hist[1:, 1:].sum())).reshape((2, 2))
    bin_iu = np.diag(binary_hist) / (binary_hist.sum(1) + binary_hist.sum(0) -
                                     np.diag(binary_hist))
    print('Bin_iu:', bin_iu)
def main():
    """Create the model, load a checkpoint, and run the evaluation loop.

    Runs 1000 support/query episodes for a single class group, accumulating
    per-class IoU, a 21-way confusion matrix, and SegScorer statistics.
    """
    args = get_arguments()
    if not os.path.exists(args.save):
        os.makedirs(args.save)

    # Build the requested backbone; adjust the default checkpoint path when a
    # backbone-specific default applies.
    if args.model == 'DeeplabMulti':
        model = DeeplabMulti(num_classes=args.num_classes)
    elif args.model == 'Oracle':
        model = DeepLab(backbone='resnet', output_stride=8)
        if args.restore_from == RESTORE_FROM:
            args.restore_from = RESTORE_FROM_ORC
    elif args.model == 'DeeplabVGG':
        model = DeeplabVGG(num_classes=args.num_classes)
        if args.restore_from == RESTORE_FROM:
            args.restore_from = RESTORE_FROM_VGG

    if args.restore_from[:4] == 'http':
        saved_state_dict = model_zoo.load_url(args.restore_from)
    else:
        saved_state_dict = torch.load(args.restore_from)
    # Keep only checkpoint entries the current model knows about, so that
    # checkpoints saved by slightly different code versions still load.
    model_dict = model.state_dict()
    saved_state_dict = {
        k: v for k, v in saved_state_dict.items() if k in model_dict
    }
    model_dict.update(saved_state_dict)
    # BUG FIX: load the *merged* dict. The original loaded the filtered
    # checkpoint dict directly, which raises "Missing key(s) in state_dict"
    # under strict loading whenever the checkpoint lacks any model parameter.
    model.load_state_dict(model_dict)

    device = torch.device("cuda" if not args.cpu else "cpu")
    model = model.to(device)
    model.eval()

    num_classes = 20
    tp_list = [0] * num_classes
    fp_list = [0] * num_classes
    fn_list = [0] * num_classes
    iou_list = [0] * num_classes
    hist = np.zeros((21, 21))  # 21-way confusion matrix (20 classes + bg)
    group = 1
    scorer = SegScorer(num_classes=21)
    datalayer = SSDatalayer(group)

    for count in tqdm(range(1000)):
        dat = datalayer.dequeue()
        # NOTE(review): the 'first'/'second' roles look swapped between images
        # and labels (query image from 'first_img' but query label from
        # 'second_label'); kept as in the original — verify against the
        # data layer.
        ref_img = dat['second_img'][0]
        query_img = dat['first_img'][0]
        query_label = dat['second_label'][0]
        ref_label = dat['first_label'][0]
        deploy_info = dat['deploy_info']

        ref_img = torch.Tensor(ref_img).cuda()
        ref_label = torch.Tensor(ref_label).cuda()
        query_img = torch.Tensor(query_img).cuda()
        query_label = torch.Tensor(query_label[0, :, :]).cuda()

        ref_img_var, query_img_var = Variable(ref_img), Variable(query_img)
        query_label_var = Variable(query_label)
        ref_label_var = Variable(ref_label)
        ref_img_var = torch.unsqueeze(ref_img_var, dim=0)
        ref_label_var = torch.unsqueeze(ref_label_var, dim=1)
        query_img_var = torch.unsqueeze(query_img_var, dim=0)
        query_label_var = torch.unsqueeze(query_label_var, dim=0)

        # Support and query images go through the network as one batch of two.
        samples = torch.cat([ref_img_var, query_img_var], 0)
        pred = model(samples, ref_label_var)
        w, h = query_label.size()
        # F.interpolate replaces the deprecated F.upsample (same semantics).
        pred = F.interpolate(pred, size=(w, h), mode='bilinear')
        pred = F.softmax(pred, dim=1).squeeze()
        values, pred = torch.max(pred, dim=0)
        pred = pred.data.cpu().numpy().astype(np.int32)

        # Save a colorized overlay of the predicted mask on the query image.
        org_img = get_org_img(query_img.squeeze().cpu().data.numpy())
        img = mask_to_img(pred, org_img)
        cv2.imwrite('save_bins/que_pred/query_set_1_%d.png' % (count), img)

        query_label = query_label.cpu().numpy().astype(np.int32)
        # Class indices start from 1 in the data layer; 0 is background.
        class_ind = int(deploy_info['first_semantic_labels'][0][0]) - 1
        scorer.update(pred, query_label, class_ind + 1)
        tp, tn, fp, fn = measure(query_label, pred)
        tp_list[class_ind] += tp
        fp_list[class_ind] += fp
        fn_list[class_ind] += fn
        # max(..., 1) avoids division by zero when both pred and label are empty.
        iou_list = [
            tp_list[ic] /
            float(max(tp_list[ic] + fp_list[ic] + fn_list[ic], 1))
            for ic in range(num_classes)
        ]
        # Relabel the binary masks with the global class id before
        # histogramming. NOTE: tmp_pred/tmp_gt_label alias pred/query_label,
        # so these assignments mutate them in place.
        tmp_pred = pred
        tmp_pred[tmp_pred > 0.5] = class_ind + 1
        tmp_gt_label = query_label
        tmp_gt_label[tmp_gt_label > 0.5] = class_ind + 1
        hist += Metrics.fast_hist(tmp_pred, query_label, 21)

    print("-------------GROUP %d-------------" % (group))
    print(iou_list)
    class_indexes = range(group * 5, (group + 1) * 5)
    print('Mean:', np.mean(np.take(iou_list, class_indexes)))

    # (A large commented-out multi-group evaluation block was removed here;
    # see version control history if it is ever needed again.)

    # Binary foreground/background IoU from the collapsed confusion matrix.
    binary_hist = np.array((hist[0, 0], hist[0, 1:].sum(), hist[1:, 0].sum(),
                            hist[1:, 1:].sum())).reshape((2, 2))
    bin_iu = np.diag(binary_hist) / (binary_hist.sum(1) + binary_hist.sum(0) -
                                     np.diag(binary_hist))
    print('Bin_iu:', bin_iu)

    scores = scorer.score()
    for k in scores.keys():
        print(k, np.mean(scores[k]), scores[k])
def val(args):
    """Transductive 1-way 1-shot evaluation over the 4 class groups.

    For every episode a fresh model is built and its group checkpoint
    restored, then one gradient step is taken on the support branch before
    predicting the query mask, so the adaptation never leaks across episodes.
    """
    num_classes = 20
    tp_list = [0] * num_classes
    fp_list = [0] * num_classes
    fn_list = [0] * num_classes
    iou_list = [0] * num_classes
    hist = np.zeros((21, 21))  # 21-way confusion matrix (20 classes + bg)
    scorer = SegScorer(num_classes=21)
    for group in range(4):
        datalayer = SSDatalayer(group)
        for count in tqdm(range(1000)):
            # Fresh model + optimizer per episode (see docstring).
            model, optimizer = get_model(args)
            model.eval()
            restore(args, model, group)
            dat = datalayer.dequeue()
            # NOTE(review): 'first'/'second' roles look swapped between images
            # and labels; kept as in the original — verify against the data layer.
            ref_img = dat['second_img'][0]
            query_img = dat['first_img'][0]
            query_label = dat['second_label'][0]
            ref_label = dat['first_label'][0]
            deploy_info = dat['deploy_info']
            ref_img = torch.Tensor(ref_img).cuda()
            ref_label = torch.Tensor(ref_label).cuda()
            query_img = torch.Tensor(query_img).cuda()
            query_label = torch.Tensor(query_label[0, :, :]).cuda()
            ref_img_var, query_img_var = Variable(ref_img), Variable(query_img)
            query_label_var = Variable(query_label)
            ref_label_var = Variable(ref_label)
            ref_img_var = torch.unsqueeze(ref_img_var, dim=0)
            ref_label_var = torch.unsqueeze(ref_label_var, dim=1)
            query_img_var = torch.unsqueeze(query_img_var, dim=0)
            query_label_var = torch.unsqueeze(query_label_var, dim=0)
            logits_A, logits_B = model.forward_1way_1shot_heat(
                query_img_var, ref_img_var, query_label_var, ref_label_var)
            # One transductive gradient step on the support-branch loss.
            loss_val, cluster_loss, loss_bce = model.get_loss(
                logits_A, ref_label_var)
            optimizer.zero_grad()
            loss_val.backward()
            optimizer.step()
            values, pred = model.get_pred(logits_B, query_img_var)
            pred = pred.data.cpu().numpy().astype(np.int32)
            query_label = query_label.cpu().numpy().astype(np.int32)
            # Class indices start from 1 in the data layer; 0 is background.
            class_ind = int(deploy_info['first_semantic_labels'][0][0]) - 1
            scorer.update(pred, query_label, class_ind + 1)
            tp, tn, fp, fn = measure(query_label, pred)
            tp_list[class_ind] += tp
            fp_list[class_ind] += fp
            fn_list[class_ind] += fn
            # max(..., 1) avoids division by zero when pred and label are empty.
            iou_list = [
                tp_list[ic] /
                float(max(tp_list[ic] + fp_list[ic] + fn_list[ic], 1))
                for ic in range(num_classes)
            ]
            # Relabel binary masks with the global class id; these aliases
            # mutate pred/query_label in place.
            tmp_pred = pred
            tmp_pred[tmp_pred > 0.5] = class_ind + 1
            tmp_gt_label = query_label
            tmp_gt_label[tmp_gt_label > 0.5] = class_ind + 1
            hist += Metrics.fast_hist(tmp_pred, query_label, 21)
        # BUG FIX: the original used Python 2 print statements here
        # ("print iou_list" / "print 'Mean:', ..."), which are SyntaxErrors
        # under Python 3 and inconsistent with the print() calls below.
        print("-------------GROUP %d-------------" % (group))
        print(iou_list)
        class_indexes = range(group * 5, (group + 1) * 5)
        print('Mean:', np.mean(np.take(iou_list, class_indexes)))
    print('BMVC IOU', np.mean(np.take(iou_list, range(0, 20))))
    miou = Metrics.get_voc_iou(hist)
    print('IOU:', miou, np.mean(miou))
    # Binary foreground/background IoU from the collapsed confusion matrix.
    binary_hist = np.array((hist[0, 0], hist[0, 1:].sum(), hist[1:, 0].sum(),
                            hist[1:, 1:].sum())).reshape((2, 2))
    bin_iu = np.diag(binary_hist) / (binary_hist.sum(1) + binary_hist.sum(0) -
                                     np.diag(binary_hist))
    print('Bin_iu:', bin_iu)
def val(args):
    """1-way 5-shot evaluation (average fusion) over the 4 class groups."""
    model = get_model(args)
    model.eval()
    # Inference only — freeze once up front. (Hoisted out of the episode
    # loop, where the original redundantly re-froze every iteration.)
    for p in model.parameters():
        p.requires_grad = False
    num_classes = 20
    tp_list = [0] * num_classes
    fp_list = [0] * num_classes
    fn_list = [0] * num_classes
    iou_list = [0] * num_classes
    hist = np.zeros((21, 21))  # 21-way confusion matrix (20 classes + bg)
    scorer = SegScorer(num_classes=21)
    for group in range(4):
        datalayer = SSDatalayer(group, 5)  # 5-shot episodes
        restore(args, model, group)
        for count in tqdm(range(1000)):
            dat = datalayer.dequeue()
            # NOTE(review): 'first'/'second' roles look inconsistent between
            # images and labels; kept as in the original — verify against the
            # data layer.
            query_img = dat['second_img']
            ref_img = dat['first_img']
            ref_label = dat['second_label']
            query_label = dat['first_label']
            deploy_info = dat['deploy_info']
            query_img = torch.Tensor(query_img[0]).cuda()
            query_label = torch.Tensor(query_label[0][0, :, :]).cuda()
            query_img_var = Variable(query_img)
            query_label_var = Variable(query_label)
            query_img_var = torch.unsqueeze(query_img_var, dim=0)
            query_label_var = torch.unsqueeze(query_label_var, dim=0)
            # One entry per support shot.
            ref_img_var_list = list(ref_img)
            ref_label_var_list = list(ref_label)
            logits = model.forward_5shot_avg(query_img_var, ref_img_var_list,
                                             ref_label_var_list)
            values, pred = model.get_pred(logits, query_img_var)
            pred = pred.data.cpu().numpy()
            query_label = query_label.cpu().numpy()
            # Class indices start from 1 in the data layer; 0 is background.
            class_ind = int(deploy_info['first_semantic_labels'][0][0]) - 1
            scorer.update(pred, query_label, class_ind + 1)
            tp, tn, fp, fn = measure(query_label, pred)
            tp_list[class_ind] += tp
            fp_list[class_ind] += fp
            fn_list[class_ind] += fn
            # max(..., 1) avoids division by zero when pred and label are empty.
            iou_list = [
                tp_list[ic] /
                float(max(tp_list[ic] + fp_list[ic] + fn_list[ic], 1))
                for ic in range(num_classes)
            ]
            # Relabel binary masks with the global class id; these aliases
            # mutate pred/query_label in place.
            tmp_pred = pred
            tmp_pred[tmp_pred > 0.5] = class_ind + 1
            tmp_gt_label = query_label
            tmp_gt_label[tmp_gt_label > 0.5] = class_ind + 1
            hist += Metrics.fast_hist(tmp_pred, query_label, 21)
        # BUG FIX: the original used Python 2 print statements here
        # ("print iou_list" / "print 'Mean:', ..."), which are SyntaxErrors
        # under Python 3 and inconsistent with the print() calls below.
        print("-------------GROUP %d-------------" % (group))
        print(iou_list)
        class_indexes = range(group * 5, (group + 1) * 5)
        print('Mean:', np.mean(np.take(iou_list, class_indexes)))
    print('BMVC IOU', np.mean(np.take(iou_list, range(0, 20))))
    miou = Metrics.get_voc_iou(hist)
    print('IOU:', miou, np.mean(miou))
    scores = scorer.score()
    for k in scores.keys():
        print(k, np.mean(scores[k]), scores[k])
def val(args):
    """Transductive 2-way 1-shot evaluation over the 4 class groups.

    A fresh model is built and restored per episode; one gradient step is
    taken on the two support-branch losses before predicting the query mask,
    so the adaptation never leaks across episodes.
    """
    num_classes = 20
    tp_list = [0] * num_classes
    fp_list = [0] * num_classes
    fn_list = [0] * num_classes
    iou_list = [0] * num_classes
    hist = np.zeros((21, 21))  # 21-way confusion matrix (20 classes + bg)
    scorer = SegScorer(num_classes=21)
    for group in range(4):
        datalayer = SSDatalayer(group, k_shot=1)
        for count in tqdm(range(1000)):
            # Fresh model + optimizer per episode (see docstring).
            model, optimizer = get_model(args)
            model.eval()
            restore(args, model, group)
            dat = datalayer.dequeue()
            # Two support "ways" (classes), one shot each, plus the query pair.
            s_class_one = dat['first_img'][0]
            s_one_label = dat['first_label'][0]
            s_class_two = dat['first_img'][1]
            s_two_label = dat['first_label'][1]
            query_img = dat['second_img'][0]
            query_label = dat['second_label'][0]
            s1_img_list = [warper_img(img) for img in s_class_one]
            s1_label_list = [warper_img(img) for img in s_one_label]
            s2_img_list = [warper_img(img) for img in s_class_two]
            s2_label_list = [warper_img(img) for img in s_two_label]
            query_img = torch.Tensor(query_img).cuda()
            query_label = torch.Tensor(query_label[0, :, :]).cuda()
            support_img_var = [s1_img_list, s2_img_list]
            support_label_var = [s1_label_list, s2_label_list]
            query_img = torch.unsqueeze(query_img, dim=0)
            deploy_info = dat['deploy_info']
            logits_A_one, logits_A_two, logits_B = model.forward_2way_1shot_avg(
                query_img, support_img_var, support_label_var)
            # One transductive gradient step on the summed support losses.
            loss_A_one, cluster_loss, loss_bce = model.get_2way_loss(
                logits_A_one, support_label_var[0][0])
            loss_A_two, cluster_loss, loss_bce = model.get_2way_loss(
                logits_A_two, support_label_var[1][0])
            loss_val = loss_A_one + loss_A_two
            optimizer.zero_grad()
            loss_val.backward()
            optimizer.step()
            values, pred = model.get_pred(logits_B, query_img)
            w, h = query_label.size()
            pred = pred.view(w, h)
            pred = pred.data.cpu().numpy().astype(np.int32)
            query_label = query_label.data.cpu().numpy().astype(np.int32)
            # Class indices start from 1 in the data layer; 0 is background.
            class_ind = int(deploy_info['second_semantic_labels'][0]) - 1
            scorer.update(pred, query_label, class_ind + 1)
            tp, tn, fp, fn = measure(query_label, pred)
            tp_list[class_ind] += tp
            fp_list[class_ind] += fp
            fn_list[class_ind] += fn
            # max(..., 1) avoids division by zero when pred and label are empty.
            iou_list = [
                tp_list[ic] /
                float(max(tp_list[ic] + fp_list[ic] + fn_list[ic], 1))
                for ic in range(num_classes)
            ]
            # Relabel binary masks with the global class id; these aliases
            # mutate pred/query_label in place.
            tmp_pred = pred
            tmp_pred[tmp_pred > 0.5] = class_ind + 1
            tmp_gt_label = query_label
            tmp_gt_label[tmp_gt_label > 0.5] = class_ind + 1
            hist += Metrics.fast_hist(tmp_pred, query_label, 21)
        # BUG FIX: the original used Python 2 print statements here
        # ("print iou_list" / "print 'Mean:', ..."), which are SyntaxErrors
        # under Python 3 and inconsistent with the print() calls below.
        print("-------------GROUP %d-------------" % (group))
        print(iou_list)
        class_indexes = range(group * 5, (group + 1) * 5)
        print('Mean:', np.mean(np.take(iou_list, class_indexes)))
    print('BMVC IOU', np.mean(np.take(iou_list, range(0, 20))))
    miou = Metrics.get_voc_iou(hist)
    print('IOU:', miou, np.mean(miou))
    # Binary foreground/background IoU from the collapsed confusion matrix.
    binary_hist = np.array((hist[0, 0], hist[0, 1:].sum(), hist[1:, 0].sum(),
                            hist[1:, 1:].sum())).reshape((2, 2))
    bin_iu = np.diag(binary_hist) / (binary_hist.sum(1) + binary_hist.sum(0) -
                                     np.diag(binary_hist))
    print('Bin_iu:', bin_iu)
def val(args):
    """1-way 1-shot evaluation over the 4 class groups (no adaptation)."""
    model = get_model(args)
    model.eval()
    num_classes = 20
    tp_list = [0] * num_classes
    fp_list = [0] * num_classes
    fn_list = [0] * num_classes
    iou_list = [0] * num_classes
    hist = np.zeros((21, 21))  # 21-way confusion matrix (20 classes + bg)
    scorer = SegScorer(num_classes=21)
    for group in range(4):
        datalayer = SSDatalayer(group)
        restore(args, model, group)
        for count in tqdm(range(1000)):
            dat = datalayer.dequeue()
            # NOTE(review): 'first'/'second' roles look swapped between images
            # and labels; kept as in the original — verify against the data layer.
            ref_img = dat['second_img'][0]
            query_img = dat['first_img'][0]
            query_label = dat['second_label'][0]
            ref_label = dat['first_label'][0]
            deploy_info = dat['deploy_info']
            ref_img = torch.Tensor(ref_img).cuda()
            ref_label = torch.Tensor(ref_label).cuda()
            query_img = torch.Tensor(query_img).cuda()
            query_label = torch.Tensor(query_label[0, :, :]).cuda()
            ref_img_var, query_img_var = Variable(ref_img), Variable(query_img)
            query_label_var = Variable(query_label)
            ref_label_var = Variable(ref_label)
            ref_img_var = torch.unsqueeze(ref_img_var, dim=0)
            ref_label_var = torch.unsqueeze(ref_label_var, dim=1)
            query_img_var = torch.unsqueeze(query_img_var, dim=0)
            query_label_var = torch.unsqueeze(query_label_var, dim=0)
            logits = model(query_img_var, ref_img_var, ref_label_var,
                           ref_label_var)
            values, pred = model.get_pred(logits, query_img_var)
            pred = pred.data.cpu().numpy().astype(np.int32)
            query_label = query_label.cpu().numpy().astype(np.int32)
            # Class indices start from 1 in the data layer; 0 is background.
            class_ind = int(deploy_info['first_semantic_labels'][0][0]) - 1
            scorer.update(pred, query_label, class_ind + 1)
            tp, tn, fp, fn = measure(query_label, pred)
            tp_list[class_ind] += tp
            fp_list[class_ind] += fp
            fn_list[class_ind] += fn
            # max(..., 1) avoids division by zero when pred and label are empty.
            iou_list = [
                tp_list[ic] /
                float(max(tp_list[ic] + fp_list[ic] + fn_list[ic], 1))
                for ic in range(num_classes)
            ]
            # Relabel binary masks with the global class id; these aliases
            # mutate pred/query_label in place.
            tmp_pred = pred
            tmp_pred[tmp_pred > 0.5] = class_ind + 1
            tmp_gt_label = query_label
            tmp_gt_label[tmp_gt_label > 0.5] = class_ind + 1
            hist += Metrics.fast_hist(tmp_pred, query_label, 21)
        # BUG FIX: the original used Python 2 print statements here
        # ("print iou_list" / "print 'Mean:', ..."), which are SyntaxErrors
        # under Python 3 and inconsistent with the print() calls below.
        print("-------------GROUP %d-------------" % (group))
        print(iou_list)
        class_indexes = range(group * 5, (group + 1) * 5)
        print('Mean:', np.mean(np.take(iou_list, class_indexes)))
    print('BMVC IOU', np.mean(np.take(iou_list, range(0, 20))))
    miou = Metrics.get_voc_iou(hist)
    print('IOU:', miou, np.mean(miou))
    # Binary foreground/background IoU from the collapsed confusion matrix.
    binary_hist = np.array((hist[0, 0], hist[0, 1:].sum(), hist[1:, 0].sum(),
                            hist[1:, 1:].sum())).reshape((2, 2))
    bin_iu = np.diag(binary_hist) / (binary_hist.sum(1) + binary_hist.sum(0) -
                                     np.diag(binary_hist))
    print('Bin_iu:', bin_iu)
    scores = scorer.score()
    for k in scores.keys():
        print(k, np.mean(scores[k]), scores[k])