Example 1
            def val():
                model.eval()
                mean = np.array([0.485, 0.456, 0.406],
                                dtype=np.float32).reshape(1, 1, 3)
                std = np.array([0.229, 0.224, 0.225],
                               dtype=np.float32).reshape(1, 1, 3)
                t3 = time.time()
                res = []
                i = 1
                # file_lists = sorted(glob.glob(args.img_list))
                file_lists = open('val.txt')
                for file in tqdm(file_lists.readlines()):
                    torch.cuda.synchronize()
                    seg = file[:-2].split('_')
                    img = plt.imread(
                        '/mnt/raid/Talor/CSP_res2net/data/citypersons/images/val/'
                        + seg[0] + '/' + file[:-2]).astype(np.float32)
                    img_pre = preprocess(img[:, :, ::-1], mean, std)
                    img_pre = img_pre.cuda()

                    with torch.no_grad():
                        output = model(img_pre)[-1]
                    output['hm'].sigmoid_()
                    hm, wh, reg, attr = output['hm'], output['wh'], output[
                        'reg'], output['aed']

                    density = attr.pow(2).sum(dim=1, keepdim=True).sqrt()
                    diversity = torch.div(attr, density)
                    boxes = parse_det(hm,
                                      wh,
                                      reg,
                                      density=density,
                                      diversity=diversity,
                                      score=0.01,
                                      down=4)

                    if len(boxes) > 0:
                        boxes[:, [2, 3]] -= boxes[:, [0, 1]]

                        for box in boxes:
                            temp = dict()
                            temp['image_id'] = i
                            temp['category_id'] = 1
                            temp['bbox'] = box[:4].tolist()
                            temp['score'] = float(box[4])
                            res.append(temp)
                    i = i + 1

                with open('./_temp_val.json', 'w') as f:
                    json.dump(res, f)

                MRs = validate('./eval_city/val_gt.json', './_temp_val.json')
                t4 = time.time()
                print(
                    'Summary: [Reasonable: %.2f%%], [Bare: %.2f%%], [Partial: %.2f%%], [Heavy: %.2f%%]'
                    % (MRs[0] * 100, MRs[1] * 100, MRs[2] * 100, MRs[3] * 100))
                print('Validation time used: %.3f' % (t4 - t3))
                return MRs[0]
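
Examples 1 and 5 call a preprocess helper that is not included in these snippets. The sketch below is a plausible minimal version, inferred only from how it is called (a float32 HxWx3 image plus the (1, 1, 3) mean/std arrays defined above) and assuming the image is already scaled to [0, 1]; the real project helper may differ.

import numpy as np
import torch

def preprocess(img, mean, std):
    """Hypothetical stand-in for preprocess(): normalize a float32 HxWx3 image
    with per-channel mean/std, convert HWC -> CHW and add a batch dimension."""
    img = (img - mean) / std                             # broadcasts over (1, 1, 3)
    img = np.ascontiguousarray(img.transpose(2, 0, 1))   # HWC -> CHW
    return torch.from_numpy(img).unsqueeze(0)            # (1, 3, H, W) tensor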
Example 2
def val(log=None):
    net.eval()

    if config.teacher:
        print('Load teacher params')
        # snapshot a real copy of the student weights: state_dict() returns live
        # references that load_state_dict() would overwrite in place
        student_dict = {k: v.clone() for k, v in net.module.state_dict().items()}
        net.module.load_state_dict(teacher_dict)

    print('Perform validation...')
    res = []
    t3 = time.time()
    for i, data in enumerate(testloader, 0):
        inputs = data.cuda()
        with torch.no_grad():
            pos, height, offset = net(inputs)

        boxes = parse_det_offset(pos.cpu().numpy(),
                                 height.cpu().numpy(),
                                 offset.cpu().numpy(),
                                 config.size_test,
                                 score=0.1,
                                 down=4,
                                 nms_thresh=0.5)
        if len(boxes) > 0:
            boxes[:, [2, 3]] -= boxes[:, [0, 1]]

            for box in boxes:
                temp = dict()
                temp['image_id'] = i + 1
                temp['category_id'] = 1
                temp['bbox'] = box[:4].tolist()
                temp['score'] = float(box[4])
                res.append(temp)

        print('\r%d/%d' % (i + 1, len(testloader)), end='')
        sys.stdout.flush()
    print('')

    if config.teacher:
        print('Load back student params')
        net.module.load_state_dict(student_dict)

    with open('./_temp_val.json', 'w') as f:
        json.dump(res, f)

    MRs = validate('./eval_city/val_gt.json', './_temp_val.json')
    t4 = time.time()
    print(
        'Summary: [Reasonable: %.2f%%], [Bare: %.2f%%], [Partial: %.2f%%], [Heavy: %.2f%%]'
        % (MRs[0] * 100, MRs[1] * 100, MRs[2] * 100, MRs[3] * 100))
    if log is not None:
        log.write("%.7f %.7f %.7f %.7f\n" % tuple(MRs))
    print('Validation time used: %.3f' % (t4 - t3))
    return MRs[0]
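
Example 2 validates with teacher (for instance EMA-averaged) weights and then restores the student weights. Because load_state_dict() copies data into the existing parameter tensors, the snapshot must be a genuine copy rather than the live state_dict() view. A minimal sketch of the pattern, with net and teacher_dict assumed to exist as in the snippet:

import copy

def swap_in_teacher(net, teacher_dict):
    """Snapshot the current (student) weights, then load the teacher weights.
    Returns the snapshot so the caller can restore it after validation."""
    # deepcopy is required: state_dict() returns references to the live
    # parameter tensors, which load_state_dict() overwrites in place.
    student_dict = copy.deepcopy(net.state_dict())
    net.load_state_dict(teacher_dict)
    return student_dict

def swap_back_student(net, student_dict):
    """Restore the student weights saved by swap_in_teacher()."""
    net.load_state_dict(student_dict)

# usage around a validation pass (assumed names):
#   snapshot = swap_in_teacher(net.module, teacher_dict)
#   ... run validation ...
#   swap_back_student(net.module, snapshot)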
Example 3
def val(testloader, net, config, args, teacher_dict=None):
    net.eval()

    print('Perform validation...')
    res = []
    inference_time = 0
    num_images = len(testloader)
    for i, data in enumerate(testloader):
        inputs = data.cuda()
        with torch.no_grad():
            t1 = time.time()
            pos, height, offset = net(inputs)
            t2 = time.time()
            inference_time += (t2 - t1)

        boxes = parse_det_offset(pos.cpu().numpy(),
                                 height.cpu().numpy(),
                                 offset.cpu().numpy(),
                                 config.size_test,
                                 score=0.1,
                                 down=4,
                                 nms_thresh=0.5)
        if len(boxes) > 0:
            boxes[:, [2, 3]] -= boxes[:, [0, 1]]
            for box in boxes:
                temp = dict()
                temp['image_id'] = i + 1
                temp['category_id'] = 1
                temp['bbox'] = box[:4].tolist()
                temp['score'] = float(box[4])
                res.append(temp)

        print('\r%d/%d' % (i + 1, num_images), end='')
        sys.stdout.flush()

    temp_val = args.json_out
    with open(temp_val, 'w') as f:
        json.dump(res, f)

    MRs = validate('./eval_city/val_gt.json', temp_val)
    print(
        '\nSummary: [Reasonable: %.2f%%], [Reasonable_small: %.2f%%], [Reasonable_occ=heavy: %.2f%%], [All: %.2f%%]'
        % (MRs[0] * 100, MRs[1] * 100, MRs[2] * 100, MRs[3] * 100))
    FPS = int(num_images / inference_time)
    print('FPS : {}'.format(FPS))
    return MRs
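
Example 3 derives FPS from wall-clock time around the forward pass. CUDA kernels launch asynchronously, so without a synchronization point the measured interval can cover only the kernel launches rather than the actual GPU work. A hedged sketch of a safer per-batch timing helper (names are placeholders, not from the original code):

import time
import torch

def timed_forward(net, inputs):
    """Run one forward pass under no_grad and return (outputs, seconds),
    synchronizing before and after so the interval reflects the GPU work."""
    if torch.cuda.is_available():
        torch.cuda.synchronize()
    t1 = time.time()
    with torch.no_grad():
        outputs = net(inputs)
    if torch.cuda.is_available():
        torch.cuda.synchronize()
    return outputs, time.time() - t1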
Example 4
def val(log=None):
    net1.eval()
    model.eval()
    # net.eval()

    print('Perform validation...')
    res = []
    t3 = time.time()
    for i, data in enumerate(testloader, 0):
        tt1 = time.time()
        inputs = data[0].cuda()

        img = plt.imread(data[1][0]).astype(np.float32)
        img_pre = preprocess(img[:, :, ::-1], mean, std)
        img_pre = img_pre.cuda()
        with torch.no_grad():
            pos1, height1, offset1 = net1(inputs)
            #  pos, height, offset = net(inputs)
            output = model(img_pre)[-1]
        output['hm'].sigmoid_()
        hm, wh, reg, attr = output['hm'], output['wh'], output['reg'], output[
            'aed']

        density = attr.pow(2).sum(dim=1, keepdim=True).sqrt()
        diversity = torch.div(attr, density)
        boxes = parse_det(hm,
                          wh,
                          reg,
                          density=density,
                          diversity=diversity,
                          score=0.01,
                          down=4)

        # boxes = parse_det_offset(pos.cpu().numpy(), height.cpu().numpy(), offset.cpu().numpy(), config.size_test, score=0.01, down=4, nms_thresh=0.5)
        boxes1 = parse_det_offset(pos1.cpu().numpy(),
                                  height1.cpu().numpy(),
                                  offset1.cpu().numpy(),
                                  config.size_test,
                                  score=0.01,
                                  down=4,
                                  nms_thresh=0.5)

        bb = list(boxes) + list(boxes1)
        # bb = list(boxes)
        boxes = np.asarray(bb, dtype=np.float32)
        keep = nms(boxes, 0.5, usegpu=False, gpu_id=0)
        boxes = boxes[keep, :]
        if len(boxes) > 0:
            boxes[:, [2, 3]] -= boxes[:, [0, 1]]

            for box in boxes:
                temp = dict()
                temp['image_id'] = i + 1
                temp['category_id'] = 1
                temp['bbox'] = box[:4].tolist()
                temp['score'] = float(box[4])
                res.append(temp)
        tt2 = time.time()
        print('\r%d/%d, %f' % (i + 1, len(testloader), tt2 - tt1), end='')
        sys.stdout.flush()
    print('')

    if config.teacher:
        # NOTE: leftover from the Example 2 variant; this function never swaps in
        # teacher weights, so `net` and `student_dict` are not defined here.
        print('Load back student params')
        net.module.load_state_dict(student_dict)

    with open('./_temp_val.json', 'w') as f:
        json.dump(res, f)

    MRs = validate('./eval_city/val_gt.json', './_temp_val.json')
    t4 = time.time()
    print(
        'Summary: [Reasonable: %.2f%%], [Bare: %.2f%%], [Partial: %.2f%%], [Heavy: %.2f%%]'
        % (MRs[0] * 100, MRs[1] * 100, MRs[2] * 100, MRs[3] * 100))
    if log is not None:
        log.write("%.7f %.7f %.7f %.7f\n" % tuple(MRs))
    print('Validation time used: %.3f' % (t4 - t3))
    return MRs[0]
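
Example 4 merges the detections of two models and removes duplicates with an external nms() helper. A minimal NumPy sketch of greedy IoU-based NMS over (x1, y1, x2, y2, score) rows, standing in for that helper under the assumption that it performs standard greedy suppression:

import numpy as np

def nms_numpy(dets, iou_thresh=0.5):
    """Greedy NMS over an (N, 5) array of (x1, y1, x2, y2, score); returns kept row indices."""
    x1, y1, x2, y2, scores = dets[:, 0], dets[:, 1], dets[:, 2], dets[:, 3], dets[:, 4]
    areas = (x2 - x1) * (y2 - y1)
    order = scores.argsort()[::-1]              # highest score first
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(int(i))
        # intersection of the current box with the remaining boxes
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        inter = np.maximum(0.0, xx2 - xx1) * np.maximum(0.0, yy2 - yy1)
        iou = inter / (areas[i] + areas[order[1:]] - inter)
        order = order[1:][iou <= iou_thresh]    # drop heavily overlapping boxes
    return keep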
Example 5
def main():
    # BGR
    mean = np.array([0.485, 0.456, 0.406],
                    dtype=np.float32).reshape(1, 1, 3)
    std = np.array([0.229, 0.224, 0.225],
                   dtype=np.float32).reshape(1, 1, 3)

    args = parse_args()
    num_layers = 34
    heads = {'hm': 1, 'wh': 1, 'reg': 2, 'aed': 4}
    model = peddla_net(num_layers, heads, head_conv=256, down_ratio=4).cuda().eval()

    # load model
    # model = load_model(model, 'final.pth')
    model.load_state_dict(torch.load('./ckpt/CSID-5.pth'))
    # torch.cuda.empty_cache()
    
    t3 = time.time()
    res = []
    i = 1
    # file_lists = sorted(glob.glob(args.img_list))
    file_lists = open('val.txt')
    for file in tqdm(file_lists.readlines()):
        torch.cuda.synchronize()
        seg = file[:-2].split('_')
        img = plt.imread('/mnt/raid/Talor/CSP_res2net/data/citypersons/images/val/' + seg[0] + '/' + file[:-2]).astype(np.float32)
        img_pre = preprocess(img[:, :, ::-1], mean, std)
        img_pre = img_pre.cuda()

        with torch.no_grad():
            output = model(img_pre)[-1]
        output['hm'].sigmoid_()
        hm, wh, reg, attr = output['hm'], output['wh'], output['reg'], output['aed']

        density = attr.pow(2).sum(dim=1, keepdim=True).sqrt()
        diversity = torch.div(attr, density)
        # print(hm.shape, wh.shape, reg.shape, attr.shape, density.shape, diversity.shape)
        boxes = parse_det(hm, wh, reg, density=density, diversity=diversity, score=0.01, down=4)
        
        if len(boxes) > 0:
            boxes[:, [2, 3]] -= boxes[:, [0, 1]]

            for box in boxes:
                temp = dict()
                temp['image_id'] = i
                temp['category_id'] = 1
                temp['bbox'] = box[:4].tolist()
                temp['score'] = float(box[4])
                res.append(temp)
            # print('\r%d/%d' % (i, 500))
            # sys.stdout.flush()
        i = i + 1
        '''   
        if len(boxes) > 0:
            boxes[:, [2, 3]] -= boxes[:, [0, 1]]

            fig, ax = plt.subplots(1)
            ax.imshow(img)
            for i in range(len(boxes)):
                x, y, w, h, score = boxes[i]
                rect = patches.Rectangle((x, y), w, h, linewidth=1, edgecolor='r', facecolor='none')
                ax.add_patch(rect)
        else:
            plt.imshow(img)
        plt.show()
        '''
    with open('./_temp_val.json', 'w') as f:
        json.dump(res, f)

    MRs = validate('./eval_city/val_gt.json', './_temp_val.json')
    t4 = time.time()
    print('Summary: [Reasonable: %.2f%%], [Bare: %.2f%%], [Partial: %.2f%%], [Heavy: %.2f%%]'
          % (MRs[0]*100, MRs[1]*100, MRs[2]*100, MRs[3]*100))
    print('Validation time used: %.3f' % (t4 - t3))
    return MRs[0]
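
All five examples share the same post-processing step: boxes[:, [2, 3]] -= boxes[:, [0, 1]] turns (x1, y1, x2, y2, score) corners into (x, y, w, h), and each row becomes a COCO-style result dict keyed by image id. A small self-contained sketch of that conversion (the function name is ours, not from the original code):

import numpy as np

def boxes_to_coco(boxes, image_id, category_id=1):
    """Convert an (N, 5) array of (x1, y1, x2, y2, score) detections into
    COCO-style result dicts with (x, y, w, h) boxes."""
    results = []
    if len(boxes) == 0:
        return results
    boxes = np.asarray(boxes, dtype=np.float32).copy()
    boxes[:, [2, 3]] -= boxes[:, [0, 1]]        # (x2, y2) -> (w, h)
    for box in boxes:
        results.append({
            'image_id': image_id,
            'category_id': category_id,
            'bbox': box[:4].tolist(),
            'score': float(box[4]),
        })
    return results

# usage inside the loops above (assumed): res.extend(boxes_to_coco(boxes, image_id=i))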