Example #1
def get_iou(data_list, class_num, save_path=None):
    import numpy as np  # used for the classes array below
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool() 
    m_list = pool.map(f, data_list)
    pool.close() 
    pool.join() 
    
    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()

    classes = np.array(('background',  # always index 0
                        'aeroplane', 'bicycle', 'bird', 'boat',
                        'bottle', 'bus', 'car', 'cat', 'chair',
                        'cow', 'diningtable', 'dog', 'horse',
                        'motorbike', 'person', 'pottedplant',
                        'sheep', 'sofa', 'train', 'tvmonitor'))

    for i, iou in enumerate(j_list):
        print('class {:2d} {:12} IU {:.2f}'.format(i, classes[i], j_list[i]))

    
    print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            for i, iou in enumerate(j_list):
                f.write('class {:2d} {:12} IU {:.2f}'.format(i, classes[i], j_list[i]) + '\n')
            f.write('meanIOU: ' + str(aveJ) + '\n')
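
All of these examples lean on utils.metric.ConfusionMatrix, whose source is not shown on this page. A minimal sketch consistent with how it is used here follows; this is an assumption, not the actual implementation. generateM turns one (gt, pred) pair of flattened label arrays into a confusion matrix, addM accumulates, and jaccard returns the mean IoU, the per-class IoU list, and the raw matrix.

import numpy as np

class ConfusionMatrix:
    """Hypothetical stand-in for utils.metric.ConfusionMatrix."""

    def __init__(self, nclass):
        self.nclass = nclass
        self.M = np.zeros((nclass, nclass), dtype=np.int64)

    def generateM(self, item):
        # item is a (gt, pred) pair of flattened label arrays;
        # gt labels outside [0, nclass) (e.g. an ignore label) are dropped
        gt, pred = item
        m = np.zeros((self.nclass, self.nclass), dtype=np.int64)
        valid = (gt >= 0) & (gt < self.nclass)
        np.add.at(m, (gt[valid], pred[valid]), 1)
        return m

    def addM(self, m):
        self.M += m

    def jaccard(self):
        # per-class IoU = TP / (TP + FP + FN); rows are ground truth,
        # columns are predictions
        tp = np.diag(self.M).astype(np.float64)
        denom = self.M.sum(axis=0) + self.M.sum(axis=1) - np.diag(self.M)
        j_list = tp / np.maximum(denom, 1)
        return j_list.mean(), j_list, self.M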
Example #2
def get_iou(data_list, class_num, save_path=None):
    import numpy as np  # used for the classes array below
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool()
    m_list = pool.map(f, data_list)
    pool.close()
    pool.join()

    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()

    classes = np.array(('background',  # always index 0
                        'aeroplane', 'bicycle', 'bird', 'boat',
                        'bottle', 'bus', 'car', 'cat', 'chair',
                        'cow', 'diningtable', 'dog', 'horse',
                        'motorbike', 'person', 'pottedplant',
                        'sheep', 'sofa', 'train', 'tvmonitor'))

    for i, iou in enumerate(j_list):
        print('class {:2d} {:12} IU {:.2f}'.format(i, classes[i], j_list[i]))


    print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            for i, iou in enumerate(j_list):
                f.write('class {:2d} {:12} IU {:.2f}'.format(i, classes[i], j_list[i]) + '\n')
            f.write('meanIOU: ' + str(aveJ) + '\n')
Example #3
def get_iou(data_list, class_num, save_path=None):
    import numpy as np  # used for the classes array below
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool()
    m_list = pool.map(f, data_list)
    pool.close()
    pool.join()

    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()

    classes = np.array((
        'background',  # always index 0
        'REA',
        'SRF',
        'PED'))

    for i, iou in enumerate(j_list):
        print('class {:2d} {:12} IU {:.2f}'.format(i, classes[i], j_list[i]))

    print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            for i, iou in enumerate(j_list):
                f.write('class {:2d} {:12} IU {:.2f}'.format(
                    i, classes[i], j_list[i]) + '\n')
            f.write('meanIOU: ' + str(aveJ) + '\n')
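
A hypothetical call to the 4-class variant above, with synthetic arrays standing in for real ground truth and predictions:

import numpy as np

gt = np.random.randint(0, 4, size=(256, 256))    # fake ground-truth mask
pred = np.random.randint(0, 4, size=(256, 256))  # fake predicted mask
data_list = [(gt.flatten(), pred.flatten())]     # one (gt, pred) pair per image
get_iou(data_list, class_num=4, save_path='iou.txt')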
Example #4
def get_iou(data_list, class_num, dataset, save_path=None):
    import numpy as np  # used for the classes array below
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool()
    m_list = pool.map(f, data_list)
    pool.close()
    pool.join()

    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()

    classes = np.array(("road", "sidewalk", "building", "wall", "fence",
                        "pole", "traffic_light", "traffic_sign", "vegetation",
                        "terrain", "sky", "person", "rider", "car", "truck",
                        "bus", "train", "motorcycle", "bicycle"))

    for i, iou in enumerate(j_list):
        print('class {:2d} {:12} IU {:.2f}'.format(i, classes[i],
                                                   100 * j_list[i]))

    print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            for i, iou in enumerate(j_list):
                f.write('class {:2d} {:12} IU {:.2f}'.format(
                    i, classes[i], 100 * j_list[i]) + '\n')
            f.write('meanIOU: ' + str(aveJ) + '\n')
    return aveJ
Example #5
def get_iou(data_list, class_num, dataset, save_path=None):
    import numpy as np  # used for the classes array below
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool()
    m_list = pool.map(f, data_list)
    pool.close()
    pool.join()

    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()

    if dataset == 'pascal_voc':
        classes = np.array((
            'background',  # always index 0
            'aeroplane',
            'bicycle',
            'bird',
            'boat',
            'bottle',
            'bus',
            'car',
            'cat',
            'chair',
            'cow',
            'diningtable',
            'dog',
            'horse',
            'motorbike',
            'person',
            'pottedplant',
            'sheep',
            'sofa',
            'train',
            'tvmonitor'))
    elif dataset == 'cityscapes':
        classes = np.array(
            ("road", "sidewalk", "building", "wall", "fence", "pole",
             "traffic_light", "traffic_sign", "vegetation", "terrain", "sky",
             "person", "rider", "car", "truck", "bus", "train", "motorcycle",
             "bicycle"))
    else:
        raise ValueError('unsupported dataset: ' + str(dataset))

    for i, iou in enumerate(j_list):
        print('class {:2d} {:12} IU {:.2f}'.format(i, classes[i], j_list[i]))

    print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            for i, iou in enumerate(j_list):
                f.write('class {:2d} {:12} IU {:.2f}'.format(
                    i, classes[i], j_list[i]) + '\n')
            f.write('meanIOU: ' + str(aveJ) + '\n')
    return aveJ
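
Hypothetical usage of the dataset-aware variants above; unlike Examples #1-#3, they return the mean IoU:

miou = get_iou(data_list, class_num=19, dataset='cityscapes',
               save_path='cityscapes_iou.txt')
print('mIoU: %.4f' % miou)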
Example #6
def get_iou(data_list, class_num, save_path=None):
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool() 
    m_list = pool.map(f, data_list)
    pool.close() 
    pool.join() 

    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()
    #print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            f.write('meanIOU: ' + str(aveJ) + '\n')
            f.write(str(j_list)+'\n')
            f.write(str(M)+'\n')
    return aveJ
Example #7
def validate(data_loader, G, F1, F2, args):
    batch_time = AverageMeter('Time', ':6.3f')
    top1_1 = AverageMeter('Acc_1', ':6.2f')
    top1_2 = AverageMeter('Acc_2', ':6.2f')
    progress = ProgressMeter(len(data_loader),
                             [batch_time, top1_1, top1_2],
                             prefix='Test: ')
    G.eval()
    F1.eval()
    F2.eval()
    if args.per_class_eval:
        classes = data_loader.dataset.classes
        confmat = ConfusionMatrix(len(classes))
    else:
        confmat = None

    with torch.no_grad():
        end = time.time()
        for i, (images, target) in enumerate(data_loader):
            images = images.to(device)
            target = target.to(device)
            g = G(images)
            y1, y2 = F1(g), F2(g)
            acc1, = accuracy(y1, target)
            acc2, = accuracy(y2, target)
            if confmat:
                confmat.update(target, y1.argmax(1))
            top1_1.update(acc1.item(), images.size(0))
            top1_2.update(acc2.item(), images.size(0))
            batch_time.update(time.time() - end)
            end = time.time()
            if i % args.print_freq == 0:
                progress.display(i)
        print(' * Acc1 {top1_1.avg:.3f} Acc2 {top1_2.avg:.3f}'.format(top1_1=top1_1, top1_2=top1_2))
        if confmat:
            print(confmat.format(classes))
        return top1_1.avg, top1_2.avg
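
The accuracy helper used above is not defined in this snippet (and device is assumed to be a module-level torch.device); it is commonly the top-k helper from the PyTorch ImageNet example, roughly:

import torch

def accuracy(output, target, topk=(1,)):
    # precision@k for each requested k, as percentages
    with torch.no_grad():
        maxk = max(topk)
        batch_size = target.size(0)
        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        res = []
        for k in topk:
            correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size))
        return res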
Example #8
def validate(dataloader, target_iter, classifier, device, args):

    batch_time = AverageMeter('Time', ':6.3f')
    losses = AverageMeter('Loss', ':.4e')
    top1 = AverageMeter('Acc@1', ':6.2f')
    top5 = AverageMeter('Acc@5', ':6.2f')
    progress = ProgressMeter(len(target_iter),
                             [batch_time, losses, top1, top5],
                             prefix='Test: ')
    classifier.eval()
    if args.per_class_eval:
        classes = dataloader.dataset.classes
        confmat = ConfusionMatrix(len(classes))
    else:
        confmat = None
    with torch.no_grad():
        end = time.time()
        for i, (images, target) in enumerate(dataloader):
            images, target = images.to(device), target.to(device)
            output, _ = classifier(images)
            loss = F.cross_entropy(output, target)
            acc1, acc5 = accuracy(output, target, topk=(1, 5))
            if confmat:
                confmat.update(target, output.argmax(1))
            losses.update(loss.item(), images.size(0))
            top1.update(acc1.item(), images.size(0))
            top5.update(acc5.item(), images.size(0))
            batch_time.update(time.time() - end)
            end = time.time()
            if i % args.print_freq == 0:
                progress.display(i)
        print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'.format(top1=top1,
                                                                    top5=top5))
        if confmat:
            print(confmat.format(classes))
    return top1.avg
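
AverageMeter and ProgressMeter are likewise the usual utilities from the PyTorch ImageNet example; a minimal AverageMeter, shown here as an assumption, looks like:

class AverageMeter:
    """Tracks the latest value and the running average of a metric."""

    def __init__(self, name, fmt=':f'):
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        self.val = 0.0
        self.avg = 0.0
        self.sum = 0.0
        self.count = 0

    def update(self, val, n=1):
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    def __str__(self):
        fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
        return fmtstr.format(**self.__dict__)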
Example #9
    args = parse_args()

    m_list = []
    data_list = []
    test_ids = [i.strip() for i in open(args.test_ids) if i.strip()]
    for index, img_id in enumerate(test_ids):
        if index % 100 == 0:
            print('%d processed' % index)
        pred_img_path = os.path.join(args.pred_dir, img_id+'.png')
        gt_img_path = os.path.join(args.gt_dir, img_id+'.png')
        pred = cv2.imread(pred_img_path, cv2.IMREAD_GRAYSCALE)
        gt = cv2.imread(gt_img_path, cv2.IMREAD_GRAYSCALE)
        # show_all(gt, pred)
        data_list.append([gt.flatten(), pred.flatten()])

    ConfM = ConfusionMatrix(args.class_num)
    f = ConfM.generateM
    pool = Pool() 
    m_list = pool.map(f, data_list)
    pool.close() 
    pool.join() 
    
    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()
    with open(args.save_path, 'w') as f:
        f.write('meanIOU: ' + str(aveJ) + '\n')
        f.write(str(j_list)+'\n')
        f.write(str(M)+'\n')
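
parse_args is not shown in this fragment; a hypothetical definition consistent with the flags referenced above:

import argparse

def parse_args():
    parser = argparse.ArgumentParser(
        description='Compare predicted masks against ground-truth masks.')
    parser.add_argument('--test_ids', required=True,
                        help='text file with one image id per line')
    parser.add_argument('--pred_dir', required=True,
                        help='directory of predicted .png masks')
    parser.add_argument('--gt_dir', required=True,
                        help='directory of ground-truth .png masks')
    parser.add_argument('--class_num', type=int, default=21)
    parser.add_argument('--save_path', default='result.txt')
    return parser.parse_args()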
Example #10
def evaluate(model,
             dataset,
             deeplabv2=True,
             ignore_label=250,
             save_dir=None,
             pretraining='COCO'):
    model.eval()
    if pretraining == 'COCO':
        from utils.transformsgpu import normalize_bgr as normalize
    else:
        from utils.transformsgpu import normalize_rgb as normalize

    if dataset == 'pascal_voc':
        num_classes = 21
        data_loader = get_loader(dataset)
        data_path = get_data_path(dataset)
        test_dataset = data_loader(data_path,
                                   split="val",
                                   scale=False,
                                   mirror=False,
                                   pretraining=pretraining)
        testloader = data.DataLoader(test_dataset,
                                     batch_size=1,
                                     shuffle=False,
                                     pin_memory=True)

    elif dataset == 'cityscapes':
        num_classes = 19
        data_loader = get_loader('cityscapes')
        data_path = get_data_path('cityscapes')
        if deeplabv2:
            data_aug = Compose([Resize_city()])
        else:  # for deeplabv3 original resolution
            data_aug = Compose([Resize_city_highres()])

        test_dataset = data_loader(data_path,
                                   is_transform=True,
                                   split='val',
                                   augmentations=data_aug,
                                   pretraining=pretraining)
        testloader = data.DataLoader(test_dataset,
                                     batch_size=1,
                                     shuffle=False,
                                     pin_memory=True)
    else:
        raise ValueError('unsupported dataset: ' + str(dataset))

    print('Evaluating, found ' + str(len(testloader)) + ' images.')
    confM = ConfusionMatrix(num_classes)

    data_list = []
    total_loss = []

    for index, batch in enumerate(testloader):
        image, label, size, name, _ = batch

        with torch.no_grad():
            interp = torch.nn.Upsample(size=(label.shape[1], label.shape[2]),
                                       mode='bilinear',
                                       align_corners=True)
            output = model(normalize(Variable(image).cuda(), dataset))
            output = interp(output)

            label_cuda = Variable(label.long()).cuda()
            criterion = CrossEntropy2d(ignore_label=ignore_label).cuda()
            loss = criterion(output, label_cuda)
            total_loss.append(loss.item())

            output = output.cpu().data[0].numpy()
            gt = np.asarray(label[0].numpy(), dtype=np.int64)

            output = np.asarray(np.argmax(output, axis=0), dtype=np.int64)
            data_list.append((np.reshape(gt, (-1)), np.reshape(output, (-1))))

            # filename = 'output_images/' + name[0].split('/')[-1]
            # cv2.imwrite(filename, output)

        if (index + 1) % 100 == 0:
            # print('%d processed' % (index + 1))
            process_list_evaluation(confM, data_list)
            data_list = []

    process_list_evaluation(confM, data_list)

    mIoU = get_iou(confM, dataset)
    loss = np.mean(total_loss)
    return mIoU, loss
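
process_list_evaluation and the get_iou variant called here (which takes the confusion matrix itself rather than a data list) are not shown. Plausible sketches, given the ConfusionMatrix interface used elsewhere on this page, are:

def process_list_evaluation(confM, data_list):
    # fold a batch of (gt, pred) pairs into the shared confusion matrix
    for pair in data_list:
        confM.addM(confM.generateM(pair))

def get_iou(confM, dataset):
    # 'dataset' would only be needed for per-class names; assumption
    aveJ, j_list, M = confM.jaccard()
    print('meanIOU: ' + str(aveJ))
    return aveJ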
Example #11
        interpolated_label[start_idx:end_idx] = np.argmax(
            interpolated_prob, axis=-1)  # (batch_size,)

    interpolated_common_label = _2common(interpolated_label,
                                         from_dataset)  # (N1,)
    interpolated_color = _label_to_colors_by_name(interpolated_label,
                                                  from_dataset)  # (N1, 3)
    interpolated_common_color = _label_to_colors_by_name(
        interpolated_common_label, 'common')  # (N1, 3)
    del interpolated_common_label

    # evaluate interpolation result if ground truth is available
    raw_label_path = os.path.join(raw_folder, fname_without_ext + '.labels')
    if from_dataset == to_dataset and os.path.exists(raw_label_path):
        label_gt = load_labels(raw_label_path)  # (N1,)
        # use a distinct name so the ConfusionMatrix class is not shadowed
        conf_matrix = ConfusionMatrix(num_classes)
        conf_matrix.increment_from_list(label_gt.flatten(),
                                        interpolated_label.flatten())
        conf_matrix.print_metrics()

    label_path = os.path.join(dense_folder, fname_without_ext + '.labels')
    np.savetxt(label_path, interpolated_label, fmt="%d")
    del interpolated_label
    print('writing labels for %s' % fname_without_ext)

    # output pcd with from_dataset label
    dense_pcd = open3d.geometry.PointCloud()
    dense_pcd.points = open3d.utility.Vector3dVector(raw_points)
    dense_pcd.colors = open3d.utility.Vector3dVector(interpolated_color)
    del interpolated_color
    dense_pcd_path = os.path.join(
Example #12
def get_iou(args, data_list, class_num, save_path=None):
    import numpy as np  # used for the classes array below
    from multiprocessing import Pool
    from utils.metric import ConfusionMatrix

    ConfM = ConfusionMatrix(class_num)
    f = ConfM.generateM
    pool = Pool()
    m_list = pool.map(f, data_list)
    pool.close()
    pool.join()

    for m in m_list:
        ConfM.addM(m)

    aveJ, j_list, M = ConfM.jaccard()

    if args.dataset == 'pascal_voc':
        classes = np.array((
            'background',  # always index 0
            'aeroplane',
            'bicycle',
            'bird',
            'boat',
            'bottle',
            'bus',
            'car',
            'cat',
            'chair',
            'cow',
            'diningtable',
            'dog',
            'horse',
            'motorbike',
            'person',
            'pottedplant',
            'sheep',
            'sofa',
            'train',
            'tvmonitor'))
    elif args.dataset == 'pascal_context':
        classes = np.array(
            ('background', 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle',
             'bus', 'car', 'cat', 'chair', 'cow', 'table', 'dog', 'horse',
             'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
             'tvmonitor', 'bag', 'bed', 'bench', 'book', 'building', 'cabinet',
             'ceiling', 'cloth', 'computer', 'cup', 'door', 'fence', 'floor',
             'flower', 'food', 'grass', 'ground', 'keyboard', 'light',
             'mountain', 'mouse', 'curtain', 'platform', 'sign', 'plate',
             'road', 'rock', 'shelves', 'sidewalk', 'sky', 'snow',
             'bedclothes', 'track', 'tree', 'truck', 'wall', 'water', 'window',
             'wood'))
    elif args.dataset == 'cityscapes':
        classes = np.array(
            ("road", "sidewalk", "building", "wall", "fence", "pole",
             "traffic_light", "traffic_sign", "vegetation", "terrain", "sky",
             "person", "rider", "car", "truck", "bus", "train", "motorcycle",
             "bicycle"))
    else:
        raise ValueError('unsupported dataset: ' + str(args.dataset))

    for i, iou in enumerate(j_list):
        if j_list[i] > 0:
            print('class {:2d} {:12} IU {:.2f}'.format(i, classes[i],
                                                       j_list[i]))

    print('meanIOU: ' + str(aveJ) + '\n')
    if save_path:
        with open(save_path, 'w') as f:
            for i, iou in enumerate(j_list):
                f.write('class {:2d} {:12} IU {:.2f}'.format(
                    i, classes[i], j_list[i]) + '\n')
            f.write('meanIOU: ' + str(aveJ) + '\n')
Example #13
    batch_size = flags.batch_size
    if flags.from_dataset == 'semantic':
        classes_in_model = 9
    else:
        classes_in_model = 10
    # load model
    resume_path = flags.resume_model
    model = select_model(flags.model_name, classes_in_model, hyper_params)[0]
    model = model.to(device)
    print("Resuming From ", resume_path)
    checkpoint = torch.load(resume_path)
    saved_state_dict = checkpoint['state_dict']
    model.load_state_dict(saved_state_dict)

    # Process each file
    cm = ConfusionMatrix(classes_in_model)
    common_cm = ConfusionMatrix(6)
    model = model.eval()
    # for visualization in TensorBoard
    writer = SummaryWriter('runs/embedding_example')
    global_step = 0
    for file_data in dataset.list_file_data:
        print("Processing {}".format(file_data.file_path_without_ext))

        # Predict for num_samples times
        points_collector = []
        pd_labels_collector = []
        pd_prob_collector = []
        pd_common_labels_collector = []

        # If flags.num_samples < batch_size, only one batch is predicted