Example #1
0
def main():
    """Run the OpenPCDet quick demo: load a checkpoint, run inference on every
    sample of the demo dataset, and save one visualization image per sample.
    """
    args, cfg = parse_config()
    logger = common_utils.create_logger()
    logger.info(
        '-----------------Quick Demo of OpenPCDet-------------------------')

    # Dataset that serves raw point-cloud files from args.data_path.
    demo_dataset = DemoDataset(
        dataset_cfg=cfg.DATA_CONFIG,
        class_names=cfg.CLASS_NAMES,
        training=False,
        root_path=Path(args.data_path),
        ext=args.ext,
        logger=logger,
    )
    logger.info(f'Total number of samples: \t{len(demo_dataset)}')

    # Build the detector, restore weights on CPU first, then move to GPU.
    model = build_network(
        model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset
    )
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
    model.cuda()
    model.eval()

    with torch.no_grad():  # inference only -- no gradients needed
        for sample_idx, sample in enumerate(demo_dataset):
            logger.info(f'Visualized sample index: \t{sample_idx + 1}')
            batch = demo_dataset.collate_batch([sample])
            load_data_to_gpu(batch)
            pred_dicts, _ = model.forward(batch)

            # Column 0 of 'points' is the batch index; drop it for drawing.
            V.draw_scenes(
                points=batch['points'][:, 1:],
                ref_boxes=pred_dicts[0]['pred_boxes'],
                ref_scores=pred_dicts[0]['pred_scores'],
                ref_labels=pred_dicts[0]['pred_labels'],
            )
            #mlab.show(stop=True)
            mlab.savefig(filename='test.png')

    logger.info('Demo done.')
def main():
    """Quick demo with two modes:

    * ``--saved_pred == ""``: run inference over the demo dataset and pickle
      the last sample's points and predictions to ../saved_pred/curr_pickle.pkl.
    * otherwise: load that pickle and render it off-screen via Xvfb/mayavi.
    """
    args, cfg = parse_config()
    logger = common_utils.create_logger()
    logger.info('-----------------Quick Demo of OpenPCDet-------------------------')
    demo_dataset = DemoDataset(
        dataset_cfg=cfg.DATA_CONFIG, class_names=cfg.CLASS_NAMES, training=False,
        root_path=Path(args.data_path), ext=args.ext, logger=logger
    )
    logger.info(f'Total number of samples: \t{len(demo_dataset)}')

    if args.saved_pred == "":

        model = build_network(model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset)
        model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
        model.cuda()
        model.eval()

        with torch.no_grad():
            for idx, data_dict in enumerate(demo_dataset):
                logger.info(f'Visualized sample index: \t{idx + 1}')
                data_dict = demo_dataset.collate_batch([data_dict])
                load_data_to_gpu(data_dict)
                pred_dicts, _ = model.forward(data_dict)

            # NOTE(review): this dump sits outside the loop, so only the LAST
            # sample's points/predictions are persisted -- confirm intentional.
            with open('../saved_pred/curr_pickle.pkl', 'wb+') as f:
                data_ = {
                    # Column 0 of 'points' is the batch index; store the rest.
                    "data_dict": data_dict['points'][:, 1:],
                    "pred_dicts": pred_dicts
                }
                pkl.dump(data_, f)

    else:

        with open('../saved_pred/curr_pickle.pkl', 'rb') as f:
            data_ = pkl.load(f)

        # "data_dict" was stored as the bare points array (batch column
        # already stripped at save time), NOT as a full batch dict.
        points = data_["data_dict"]
        pred_dicts = data_["pred_dicts"]

        vdisplay = Xvfb(width=1920, height=1080)
        vdisplay.start()
        # BUG FIX: the pickle holds the points array itself; the previous
        # code re-indexed it with ['points'][:, 1:] as if it were a batch
        # dict, which raises at runtime (string index into an array).
        V.draw_scenes(
            points=points, ref_boxes=pred_dicts[0]['pred_boxes'],
            ref_scores=pred_dicts[0]['pred_scores'], ref_labels=pred_dicts[0]['pred_labels']
        )
        vdisplay.stop()

        mlab.show(stop=True)
        mlab.savefig("./test_eg.png")

    logger.info('Demo done.')
Example #3
0
File: demo.py  Project: Gltina/OpenPCDet
def main():
    """Quick demo that runs detection over every sample, dumps each sample's
    predicted boxes to ``evaluation/<sample>.txt``, and visualizes the result.
    """
    args, cfg = parse_config()
    logger = common_utils.create_logger()
    logger.info(
        '-----------------Quick Demo of OpenPCDet-------------------------')
    demo_dataset = DemoDataset(dataset_cfg=cfg.DATA_CONFIG,
                               class_names=cfg.CLASS_NAMES,
                               training=False,
                               root_path=Path(args.data_path),
                               ext=args.ext,
                               logger=logger)
    logger.info(f'Total number of samples: \t{len(demo_dataset)}')
    data_name_list = demo_dataset.sample_file_list
    print('evaluation data size=', len(data_name_list))

    model = build_network(model_cfg=cfg.MODEL,
                          num_class=len(cfg.CLASS_NAMES),
                          dataset=demo_dataset)
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
    model.cuda()
    model.eval()

    with torch.no_grad():
        for idx, data_dict in enumerate(demo_dataset):
            # Typo fix: 'Detecte' -> 'Detected' in the log message.
            logger.info(f'Detected sample: \t{data_name_list[idx]}')
            data_dict = demo_dataset.collate_batch([data_dict])
            load_data_to_gpu(data_dict)
            pred_dicts, _ = model.forward(data_dict)

            print(pred_dicts)
            # Boxes to CPU numpy, rounded for stable text output.
            res = pred_dicts[0]['pred_boxes'].cpu().numpy().round(8)
            # Portable basename handling (previously a manual rfind('/')
            # slice, which breaks on Windows-style path separators).
            out_name = Path(str(data_name_list[idx])).name.replace('.bin', '.txt')
            np.savetxt('evaluation/' + out_name, res, fmt='%.08f')

            V.draw_scenes(points=data_dict['points'][:, 1:],
                          ref_boxes=pred_dicts[0]['pred_boxes'],
                          ref_scores=pred_dicts[0]['pred_scores'],
                          ref_labels=pred_dicts[0]['pred_labels'])
            mlab.show(stop=True)

    logger.info('Demo done.')
Example #4
0
    def __vis_fake__(points, gt_boxes, ref_boxes=None, scores=None, use_fakelidar=True):
        """Draw a scene, optionally converting KITTI-lidar boxes into the
        fake-lidar frame first. Boxes are deep-copied so the caller's arrays
        are never mutated.
        """
        import visual_utils.visualize_utils as vis
        import mayavi.mlab as mlab

        def _prepare(boxes):
            # Copy before (possibly) converting, so the input stays intact.
            boxes = copy.deepcopy(boxes)
            if use_fakelidar:
                boxes = box_utils.boxes3d_kitti_lidar_to_fakelidar(boxes)
            return boxes

        gt_boxes = _prepare(gt_boxes)
        if ref_boxes is not None:
            ref_boxes = _prepare(ref_boxes)

        vis.draw_scenes(points, gt_boxes, ref_boxes=ref_boxes, ref_scores=scores)
        mlab.show(stop=True)
Example #5
0
def main():
    """Quick demo: build the demo dataset, restore a checkpoint, and show
    detections for every sample in an interactive mayavi window.
    """
    args, cfg = parse_config()
    logger = common_utils.create_logger()  # progress/status logger
    logger.info(
        '-----------------Quick Demo of OpenPCDet-------------------------')
    # DemoDataset bundles everything about the input data:
    #   dataset_cfg -- data config (dataset, paths, processors, augmentors)
    #   class_names -- detection class names
    #   training    -- False: inference mode
    #   root_path   -- directory holding the point-cloud files
    #   ext         -- sample file extension
    #   logger      -- shared logger
    demo_dataset = DemoDataset(
        dataset_cfg=cfg.DATA_CONFIG,
        class_names=cfg.CLASS_NAMES,
        training=False,
        root_path=Path(args.data_path),
        ext=args.ext,
        logger=logger,
    )

    logger.info(f'Total number of samples: \t{len(demo_dataset)}')

    model = build_network(
        model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset
    )
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
    model.cuda()
    model.eval()

    # Inference only: no gradients, no backprop.
    with torch.no_grad():
        for i, sample in enumerate(demo_dataset):
            logger.info(f'Visualized sample index: \t{i + 1}')
            batch = demo_dataset.collate_batch([sample])
            load_data_to_gpu(batch)
            pred_dicts, _ = model.forward(batch)

            # Drop the batch-index column before drawing.
            V.draw_scenes(
                points=batch['points'][:, 1:],
                ref_boxes=pred_dicts[0]['pred_boxes'],
                ref_scores=pred_dicts[0]['pred_scores'],
                ref_labels=pred_dicts[0]['pred_labels'],
            )
            mlab.show(stop=True)

    logger.info('Demo done.')
Example #6
0
def main():
    """Quick demo wiring: parse config, build dataset and detector, restore
    weights, then run and visualize inference sample by sample.
    """
    # 1. Input arguments (cfg comes from e.g. tools/cfg/kitti_models/pv-rcnn.yaml).
    args, cfg = parse_config()
    logger = common_utils.create_logger()
    logger.info(
        '-----------------Quick Demo of OpenPCDet-------------------------')
    demo_dataset = DemoDataset(
        dataset_cfg=cfg.DATA_CONFIG,
        class_names=cfg.CLASS_NAMES,
        training=False,
        root_path=Path(args.data_path),
        ext=args.ext,
        logger=logger,
    )
    logger.info(f'Total number of samples: \t{len(demo_dataset)}')

    # 2. Instantiate the detector (see pcdet/models/detectors/*).
    model = build_network(
        model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset
    )
    # 3. Restore checkpoint weights, then prepare for GPU inference.
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
    model.cuda()
    model.eval()

    with torch.no_grad():
        for i, sample in enumerate(demo_dataset):
            logger.info(f'Visualized sample index: \t{i + 1}')
            # 4. Collate a single sample into a batch and ship it to the GPU.
            batch = demo_dataset.collate_batch([sample])
            load_data_to_gpu(batch)
            # Forward pass produces one prediction dict per batch element.
            pred_dicts, _ = model.forward(batch)

            # 5. Visualize points (batch column dropped) with predicted boxes.
            V.draw_scenes(
                points=batch['points'][:, 1:],
                ref_boxes=pred_dicts[0]['pred_boxes'],
                ref_scores=pred_dicts[0]['pred_scores'],
                ref_labels=pred_dicts[0]['pred_labels'],
            )
            mlab.show(stop=True)

    logger.info('Demo done.')
def main():
    """Render previously pickled predictions: for every .pkl under
    --saved_pred, draw the points, the re-oriented GT boxes, and the predicted
    boxes, then save one PNG per sample into --output_dir.
    """
    args, cfg = parse_config()

    # Creating output dir if it does not already exist
    Path(args.output_dir).mkdir(parents=True, exist_ok=True)

    for res_file in os.listdir(args.saved_pred):

        name_parts = res_file.split('.')
        if name_parts[-1] != 'pkl':
            continue
        base = name_parts[0]

        with open('%s/%s.pkl' % (args.saved_pred, base), 'rb') as f:
            data_ = pkl.load(f)

        points = data_["data_dict"]
        gt_boxes = data_["gt_boxes"]
        # Repackage the flat arrays into the pred_dicts shape the drawer expects.
        pred_dicts = [{
            "pred_boxes": data_["pred_boxes"],
            "pred_labels": data_["pred_labels"],
            "pred_scores": data_["pred_scores"],
        }]

        # Re-orient GT boxes: rotate centers with a fixed axis-permutation
        # matrix, cycle the size columns, and lift z by half the height
        # (presumably camera-frame KITTI GT -> lidar frame -- verify).
        Rot_matrix = np.array([[0, 0, 1], [-1, 0, 0], [0, -1, 0]])
        gt_boxes[:, 0:3] = (Rot_matrix @ gt_boxes[:, 0:3].T).T
        gt_boxes[:, 3:6] = gt_boxes[:, [4, 5, 3]]
        gt_boxes[:, 2] = gt_boxes[:, 2] + gt_boxes[:, 5] / 2

        fig = V.draw_scenes(points=points,
                            gt_boxes=gt_boxes,
                            ref_boxes=pred_dicts[0]['pred_boxes'],
                            ref_scores=pred_dicts[0]['pred_scores'],
                            ref_labels=pred_dicts[0]['pred_labels'])

        mlab.show(stop=True)
        mlab.savefig("%s/%s.png" % (args.output_dir, base))
Example #8
0
def main():
    """Two-mode demo.

    * ``--saved_pred == ""``: run the detector over a sequence and pickle each
      sample's points and predictions into --output_dir.
    * otherwise: load every .pkl from --saved_pred, re-orient the GT boxes,
      draw the scene, and save one PNG per sample into --output_dir.
    """
    args, cfg = parse_config()

    if args.saved_pred == "":

        logger = common_utils.create_logger()
        logger.info('-----------------Quick Demo of OpenPCDet-------------------------')
        demo_dataset = DemoDataset(
            dataset_cfg=cfg.DATA_CONFIG, class_names=cfg.CLASS_NAMES, training=False,
            root_path=Path(args.seq_path), ext=args.ext, logger=logger
        )
        logger.info(f'Total number of samples: \t{len(demo_dataset)}')

        model = build_network(model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset)
        model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
        model.cuda()
        model.eval()

        # Creating output dir once, if it does not already exist (previously
        # re-executed redundantly inside the per-sample loop).
        Path(args.output_dir).mkdir(parents=True, exist_ok=True)

        with torch.no_grad():
            for idx, data_dict in enumerate(demo_dataset):
                logger.info(f'Visualized sample index: \t{idx + 1}')
                data_dict = demo_dataset.collate_batch([data_dict])
                load_data_to_gpu(data_dict)
                pred_dicts, _ = model.forward(data_dict)

                with open('%s/curr_pickle_%s.pkl' % (args.output_dir, str(idx)), 'wb+') as f:
                    data_ = {
                        # Batch-index column stripped; plain numpy arrays stored.
                        "data_dict": data_dict['points'][:, 1:].cpu().detach().numpy(),
                        "pred_boxes": pred_dicts[0]["pred_boxes"].cpu().detach().numpy(),
                        "pred_labels": pred_dicts[0]["pred_labels"].cpu().detach().numpy(),
                        "pred_scores": pred_dicts[0]["pred_scores"].cpu().detach().numpy()
                    }
                    pkl.dump(data_, f)

    else:

        # Creating output dir if it does not already exist
        Path(args.output_dir).mkdir(parents=True, exist_ok=True)

        for res_file in os.listdir(args.saved_pred):

            file_name_parts_ = res_file.split('.')

            if file_name_parts_[-1] == 'pkl':

                with open('%s/%s.pkl' % (args.saved_pred, file_name_parts_[0]), 'rb') as f:
                    data_ = pkl.load(f)

                data_dict = data_["data_dict"]
                gt_boxes = data_["gt_boxes"]
                # Repackage flat arrays into the pred_dicts shape the drawer expects.
                pred_dicts = [{
                    "pred_boxes": data_["pred_boxes"],
                    "pred_labels": data_["pred_labels"],
                    "pred_scores": data_["pred_scores"],
                }]

                # Re-orient GT boxes: rotate centers with a fixed
                # axis-permutation matrix, cycle the size columns, and lift z
                # by half the height (presumably camera-frame GT -> lidar
                # frame -- verify against the producer of these pickles).
                Rot_matrix = np.array([
                    [0, 0, 1],
                    [-1, 0, 0],
                    [0, -1, 0]
                ])
                gt_boxes[:, 0:3] = (Rot_matrix @ gt_boxes[:, 0:3].T).T
                gt_boxes[:, 3:6] = gt_boxes[:, [4, 5, 3]]
                gt_boxes[:, 2] = gt_boxes[:, 2] + gt_boxes[:, 5]/2

                fig = V.draw_scenes(
                    points=data_dict,
                    gt_boxes=gt_boxes,
                    ref_boxes=pred_dicts[0]['pred_boxes'],
                    ref_scores=pred_dicts[0]['pred_scores'],
                    ref_labels=pred_dicts[0]['pred_labels']
                )

                mlab.show(stop=True)
                mlab.savefig("%s/%s.png" % (args.output_dir, file_name_parts_[0]))
Example #9
0
def main():
    """NuScenes single-sample demo: run the detector on sample --idx, keep
    detections with score > 0.3, visualize them, and print the elapsed time.
    """
    args, cfg = parse_config()
    logger = common_utils.create_logger()
    logger.info('-----------------Quick Demo of OpenPCDet-------------------------')

    demo_dataset = NuScenesDataset(
        dataset_cfg=cfg.DATA_CONFIG, class_names=cfg.CLASS_NAMES, training=False,
        root_path=Path(args.data_path), logger=logger
    )
    logger.info(f'Total number of samples: \t{len(demo_dataset)}')

    model = build_network(model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset)
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
    model.cuda()
    model.eval()
    start = time.time()
    with torch.no_grad():
        data_dict = demo_dataset[args.idx]
        print(type(data_dict))
        print(data_dict)

        logger.info(f'Visualized sample index: \t{args.idx}')
        data_dict = demo_dataset.collate_batch([data_dict])
        load_data_to_gpu(data_dict)
        pred_dicts, _ = model.forward(data_dict)

        # Keep detections above a fixed confidence threshold. The boolean
        # mask is passed to nonzero directly -- the previous `.float()` cast
        # was redundant (torch.nonzero yields the same indices either way).
        mask = pred_dicts[0]['pred_scores'] > 0.3
        indices = torch.nonzero(mask)
        # reshape(-1, 9): NuScenes boxes appear to carry 9 values per box
        # (7 geometry + 2 extra, presumably velocity -- TODO confirm).
        V.draw_scenes(
            points=data_dict['points'][:, 1:],
            ref_boxes=pred_dicts[0]['pred_boxes'][indices].reshape(-1, 9),
            ref_scores=pred_dicts[0]['pred_scores'][indices].reshape(-1),
            ref_labels=pred_dicts[0]['pred_labels'][indices].reshape(-1)
        )

        mlab.show(stop=True)

    end = time.time()
    print(end-start)

    logger.info('Demo done.')
Example #10
0
def main():
    """Demo over a test dataloader: run batched inference and draw predictions
    next to range-filtered ground-truth boxes (plus keypoints when the model
    produces them).
    """
    args, cfg = parse_config()
    cfg.ROOT_DIR = Path(cfg.DATA_CONFIG.DATA_PATH)
    logger = common_utils.create_logger()
    dist_test = False
    total_gpus = 1

    # Resolve the effective per-GPU batch size.
    if args.batch_size is None:
        args.batch_size = cfg.OPTIMIZATION.BATCH_SIZE_PER_GPU
    else:
        assert args.batch_size % total_gpus == 0, 'Batch size should match the number of gpus'
        args.batch_size = args.batch_size // total_gpus

    logger.info(
        '-----------------Quick Demo of OpenPCDet-------------------------')
    test_set, test_loader, sampler = build_dataloader(
        dataset_cfg=cfg.DATA_CONFIG,
        class_names=cfg.CLASS_NAMES,
        batch_size=args.batch_size,
        dist=dist_test,
        workers=args.workers,
        logger=logger,
        training=False)

    model = build_network(
        model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=test_set
    )
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=False)
    model.cuda()
    model.eval()

    with torch.no_grad():
        for idx, batch_dict in enumerate(test_loader):
            logger.info(f'Visualized sample index: \t{idx + 1}')
            load_data_to_gpu(batch_dict)
            pred_dicts, _ = model(batch_dict)

            # Drop GT boxes outside the configured point-cloud range.
            gt = batch_dict['gt_boxes'][0].cpu().numpy()
            keep = box_utils.mask_boxes_outside_range_numpy(
                gt, test_loader.dataset.point_cloud_range)
            gt = gt[keep]

            if args.show_heatmap:
                pass  # heatmap rendering is intentionally a no-op here
            # Keypoints are optional model output; default to None when absent.
            pred_keypoints = pred_dicts[0].get('pred_keypoints')

            # Last GT column is the class label; the rest is box geometry.
            V.draw_scenes(points=batch_dict['points'][:, 1:],
                          gt_boxes=gt[:, :-1],
                          ref_boxes=pred_dicts[0]['pred_boxes'],
                          ref_scores=pred_dicts[0]['pred_scores'],
                          ref_labels=pred_dicts[0]['pred_labels'],
                          gt_labels=gt[:, -1],
                          class_names=test_loader.dataset.class_names,
                          pred_keypoints=pred_keypoints)
            mlab.show(stop=True)

    logger.info('Demo done.')