Example #1
def pack_arg():
    if os.path.exists(config.preprocessed_testset_pth):
        return
    cls_type = args.cls_type
    test_ds = LM_Dataset('test', cls_type=cls_type)
    test_loader = torch.utils.data.DataLoader(
        test_ds,
        batch_size=config.test_mini_batch_size,
        shuffle=False,
        num_workers=40,
        worker_init_fn=worker_init_fn)
    data_lst = []
    for i, data in tqdm.tqdm(enumerate(test_loader),
                             leave=False,
                             desc='Preprocessing valtestset'):
        bs, _, _, _ = data[0].shape
        for ibs in range(bs):
            # rgb, pcld, cld_rgb_nrm, choose, kp_targ_ofst, ctr_targ_ofst, cls_ids, RTs, labels, kp_3ds, ctr_3ds
            i_data = [item[ibs].numpy() for item in data]
            data_lst.append(i_data)

            # Debug
            # rgb = i_data[0].transpose((1, 2, 0)).astype("uint8")[:,:,::-1].copy()
            # labels = i_data[-1].astype("uint8")
            # labels = np.repeat(labels[:, :, None], 3, 2)
            # msked_rgb = rgb * labels
            # imshow("msked_rgb", msked_rgb)
            # imshow("rgb", rgb.astype("uint8"))
            # waitKey(0)

    pkl.dump(data_lst,
             open(config.preprocessed_testset_ptn.format(cls_type), 'wb'))
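The packed list can be read back with pickle's standard API. The following is a minimal, hypothetical sketch (load_preprocessed is not part of the source) that reuses the same config.preprocessed_testset_ptn format string and the script's pkl alias:

import pickle as pkl  # same alias as in the script above; config comes from the script's own config module

def load_preprocessed(cls_type):
    # Each entry is the per-sample list of numpy arrays packed by pack_arg().
    with open(config.preprocessed_testset_ptn.format(cls_type), 'rb') as f:
        return pkl.load(f)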
Example #2
def main():
    if args.dataset == "ycb":
        test_ds = YCB_Dataset('test')
        obj_id = -1
    else:
        test_ds = LM_Dataset('test', cls_type=args.cls)
        obj_id = config.lm_obj_dict[args.cls]
    test_loader = torch.utils.data.DataLoader(
        test_ds,
        batch_size=config.test_mini_batch_size,
        shuffle=False,
        num_workers=20)

    rndla_cfg = ConfigRandLA
    model = FFB6D(n_classes=config.n_objects,
                  n_pts=config.n_sample_points,
                  rndla_cfg=rndla_cfg,
                  n_kps=config.n_keypoints)
    model.cuda()

    # load status from checkpoint
    if args.checkpoint is not None:
        load_checkpoint(model, None, filename=args.checkpoint[:-8])

    for i, data in tqdm.tqdm(enumerate(test_loader), leave=False, desc="val"):
        cal_view_pred_pose(model, data, epoch=i, obj_id=obj_id)
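load_checkpoint here comes from the repository's utilities, and the args.checkpoint[:-8] slice evidently strips an eight-character suffix such as ".pth.tar" from the stored filename. As a rough, hypothetical sketch only (the real helper and its checkpoint keys may differ), a loader along these lines could look like:

import torch

def load_checkpoint_sketch(model, optimizer=None, filename='checkpoint'):
    # Assumed layout: a dict saved with torch.save containing 'model_state'
    # (and optionally 'optimizer_state'); the repo's actual keys may differ.
    ckpt = torch.load(filename + '.pth.tar', map_location='cpu')
    model.load_state_dict(ckpt['model_state'])
    if optimizer is not None and 'optimizer_state' in ckpt:
        optimizer.load_state_dict(ckpt['optimizer_state'])
    return ckpt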
Example #3
def main():
    if args.dataset == "ycb":
        test_ds = YCB_Dataset('test')
        obj_id = -1
    else:
        test_ds = LM_Dataset('test', cls_type=args.cls)
        obj_id = config.lm_obj_dict[args.cls]
    test_loader = torch.utils.data.DataLoader(
        test_ds, batch_size=config.test_mini_batch_size, shuffle=False,
        num_workers=20
    )

    model = PVN3D(
        num_classes=config.n_objects, pcld_input_channels=6, pcld_use_xyz=True,
        num_points=config.n_sample_points
    ).cuda()
    model = convert_model(model)
    model.cuda()

    # load status from checkpoint
    if args.checkpoint is not None:
        checkpoint_status = load_checkpoint(
            model, None, filename=args.checkpoint[:-8]
        )
    model = nn.DataParallel(model)

    for i, data in tqdm.tqdm(
            enumerate(test_loader), leave=False, desc="val"
    ):
        cal_view_pred_pose(model, data, epoch=i, obj_id=obj_id)
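cal_view_pred_pose is defined elsewhere in this script. One general point worth noting: DataLoader batches arrive on the CPU, so a routine like it has to move the tensors onto the GPU before the forward pass. A minimal, hypothetical helper illustrating that step (not taken from the source):

import torch

def batch_to_cuda(data):
    # Move every tensor in the batch onto the GPU; leave non-tensor items untouched.
    if isinstance(data, dict):
        return {k: v.cuda() if torch.is_tensor(v) else v for k, v in data.items()}
    return [item.cuda() if torch.is_tensor(item) else item for item in data]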
Example #4
def pack_all():
    obj_lst = [
        'ape',
        'benchvise',
        'cam',
        'can',
        'cat',
        'driller',
        'duck',
        'eggbox',
        'glue',
        'holepuncher',
        'iron',
        'lamp',
        'phone',
    ]
    for cls_type in obj_lst:
        # test_ds = LM_Dataset('test', cls_type=args.cls_type)
        test_ds = LM_Dataset('test', cls_type=cls_type)
        test_loader = torch.utils.data.DataLoader(
            test_ds,
            batch_size=config.test_mini_batch_size,
            shuffle=False,
            num_workers=40,
            worker_init_fn=worker_init_fn)
        data_lst = []
        for i, data in tqdm.tqdm(enumerate(test_loader),
                                 leave=False,
                                 desc='Preprocessing valtestset'):
            bs, _, _, _ = data[0].shape
            for ibs in range(bs):
                # rgb, pcld, cld_rgb_nrm, choose, kp_targ_ofst, ctr_targ_ofst, cls_ids, RTs, labels, kp_3ds, ctr_3ds
                i_data = [item[ibs] for item in data]
                data_lst.append(i_data)

        pkl.dump(data_lst,
                 open(config.preprocessed_testset_ptn.format(cls_type), 'wb'))
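worker_init_fn is imported from the repository's common utilities. Its usual job is to give every DataLoader worker its own NumPy seed so that random augmentations are not duplicated across workers. A minimal sketch of the conventional PyTorch pattern, which may differ from the repo's exact implementation:

import numpy as np
import torch

def worker_init_fn(worker_id):
    # Inside a worker, torch.initial_seed() is already base_seed + worker_id,
    # so reducing it mod 2**32 yields a distinct, reproducible NumPy seed per worker.
    np.random.seed(torch.initial_seed() % (2 ** 32))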
Example #5
                            )

                        pbar = tqdm.tqdm(
                            total=eval_frequency, leave=False, desc="train"
                        )
                        pbar.set_postfix(dict(total_it=it))

                    self.viz.flush()

        return best_loss


if __name__ == "__main__":
    print("cls_type: ", args.cls)
    if not args.eval_net:
        train_ds = LM_Dataset('train', cls_type=args.cls)
        train_loader = torch.utils.data.DataLoader(
            train_ds, batch_size=config.mini_batch_size, shuffle=True,
            num_workers=20, worker_init_fn=worker_init_fn
        )
        val_ds = LM_Dataset('val', cls_type=args.cls)
        val_loader = torch.utils.data.DataLoader(
            val_ds, batch_size=config.val_mini_batch_size, shuffle=False,
            num_workers=10
        )
    else:
        if args.test_occ:
            test_ds = OCC_LM_Dataset('test', cls_type=args.cls)
            test_loader = torch.utils.data.DataLoader(
                test_ds, batch_size=config.test_mini_batch_size, shuffle=False,
                num_workers=10