Code example #1
0
File: create_datasets.py  Project: pgrady3/GrabNet
def grab_new_objs(pkl_path, mano_path):
    """Convert the hand pose of every pickled hand-object sample from the
    15-component PCA MANO parameterization to 45-component axis-angle."""
    # Target parameterization: 45 PCA components, flat hand mean.
    rh_model = mano.load(model_path=mano_path,
                         model_type='mano',
                         num_pca_comps=45,
                         batch_size=1,
                         flat_hand_mean=True)

    # Source parameterization: 15 PCA components, non-flat hand mean.
    rh_model_pkl = mano.load(model_path=mano_path,
                             model_type='mano',
                             num_pca_comps=15,
                             batch_size=1,
                             flat_hand_mean=False)

    with open(pkl_path, 'rb') as pkl_file:
        all_samples = pickle.load(pkl_file)

    for idx, new_obj in enumerate(tqdm(all_samples)):
        print('idx', idx)
        ho = new_obj['ho_aug']
        opt_dict = util.convert_pca15_aa45(ho,
                                           mano_model_in=rh_model_pkl,
                                           mano_model_out=rh_model)
Code example #2
0
def grab_new_objs(grabnet, objs_path, rot=True, n_samples=10, scale=1.):
    """Generate grasps for one or more object meshes with GrabNet.

    For each object path, `n_samples` random rotations are drawn, the
    rotated object is BPS-encoded, and CoarseNet/RefineNet produce grasp
    meshes which are saved with torch.save.

    Args:
        grabnet: trainer object providing coarse_net, refine_net, cfg,
                 device, bps and logger.
        objs_path: a single mesh file path or a list of them.
        rot: if True, apply the random rotation when loading each object copy.
        n_samples: number of grasp samples generated per object.
        scale: uniform scale applied when loading object vertices.
    """
    grabnet.coarse_net.eval()
    grabnet.refine_net.eval()

    # Right-hand MANO model batched to n_samples so all grasps for one
    # object are decoded in a single forward pass.
    rh_model = mano.load(model_path=grabnet.cfg.rhm_path,
                         model_type='mano',
                         num_pca_comps=45,
                         batch_size=n_samples,
                         flat_hand_mean=True).to(grabnet.device)

    grabnet.refine_net.rhm_train = rh_model

    grabnet.logger(f'################# \n'
                   f'Grabbing the object!'
                   )

    bps = bps_torch(custom_basis=grabnet.bps)

    # Accept a single path as well as a list of paths.
    if not isinstance(objs_path, list):
        objs_path = [objs_path]

    for new_obj in objs_path:
        # Hoisted out of the sample loop: the name depends only on the
        # object path (and previously raised NameError when n_samples == 0).
        obj_name = os.path.basename(new_obj)

        # One random rotation (degrees, per axis) for each sample.
        rand_rotdeg = np.random.random([n_samples, 3]) * np.array([360, 360, 360])
        rand_rotmat = euler(rand_rotdeg)

        dorig = {'bps_object': [],
                 'verts_object': [],
                 'mesh_object': [],
                 'rotmat': []}

        for sample_idx in range(n_samples):
            verts_obj, mesh_obj, rotmat = load_obj_verts(new_obj, rand_rotmat[sample_idx], rndrotate=rot, scale=scale)

            # BPS distance features conditioning the grasp generator.
            bps_object = bps.encode(verts_obj, feature_type='dists')['dists']

            dorig['bps_object'].append(bps_object.to(grabnet.device))
            dorig['verts_object'].append(torch.from_numpy(verts_obj.astype(np.float32)).unsqueeze(0))
            dorig['mesh_object'].append(mesh_obj)
            dorig['rotmat'].append(rotmat)

        # Stack the per-sample tensors into batch tensors.
        dorig['bps_object'] = torch.cat(dorig['bps_object'])
        dorig['verts_object'] = torch.cat(dorig['verts_object'])

        save_dir = os.path.join(grabnet.cfg.work_dir, 'grab_new_objects')
        grabnet.logger(f'#################\n'
                       f'                   \n'
                       f'Saving results for the {obj_name.upper()}'
                       f'                      \n')

        gen_meshes = get_meshes(dorig=dorig,
                                coarse_net=grabnet.coarse_net,
                                refine_net=grabnet.refine_net,
                                rh_model=rh_model,
                                save=False,
                                save_dir=save_dir
                                )

        # Ensure the output directory exists; torch.save does not create it.
        out_path = 'data/grabnet_data/meshes.pt'
        os.makedirs(os.path.dirname(out_path), exist_ok=True)
        # NOTE(review): the fixed output path means each object in objs_path
        # overwrites the previous one's meshes -- confirm this is intended.
        torch.save(gen_meshes, out_path)
Code example #3
0
    def __init__(self, data_dir, train=False, min_num_cont=1, mano_path='.'):
        """Load a pickled grasp dataset and the MANO models used for the
        PCA-15 -> axis-angle-45 pose conversion.

        Args:
            data_dir: path to the pickled list of samples.
            train: flag stored on the instance for training-time behavior.
            min_num_cont: drop samples with fewer contact vertices than this
                          (only applied when the field exists in the data).
            mano_path: directory containing the MANO model files. New
                       parameter; defaults to the previously hard-coded '.'.
        """
        start_time = time.time()
        self.dataset = pickle.load(open(data_dir, 'rb'))  # Expensive step, can take up to 5 sec
        self.train = train
        self.aug_vert_jitter = 0.0005    # TODO, value?

        # Filter out barely-contacting grasps when contact counts are available.
        if 'num_verts_in_contact' in self.dataset[0]:
            print('Cutting samples less than {}. Was size {}'.format(min_num_cont, len(self.dataset)))
            self.dataset = [s for s in self.dataset if s['num_verts_in_contact'] >= min_num_cont]

        # Output model: 45 PCA components, flat hand mean.
        self.mano_model_out = mano.load(model_path=mano_path,
                                        model_type='mano',
                                        num_pca_comps=45,
                                        batch_size=1,
                                        flat_hand_mean=True)

        # Input model: 15 PCA components, matching the stored poses.
        self.mano_model_in = mano.load(model_path=mano_path,
                                       model_type='mano',
                                       num_pca_comps=15,
                                       batch_size=1,
                                       flat_hand_mean=False)

        print('Dataset loaded in {:.2f} sec, {} samples'.format(time.time() - start_time, len(self.dataset)))
Code example #4
0
    def __init__(self):
        """Load the HO3D object models and a right-hand MANO model."""
        self.obj_root = './models/HO3D_Object_models'

        # Unpack the per-object dictionaries straight into attributes.
        (self.obj_pc_dict,
         self.obj_face_dict,
         self.obj_scale_dict,
         self.obj_pc_resample_dict,
         self.obj_resample_faceid_dict) = utils_HO3D_FPHA.load_objects_HO3D(self.obj_root)

        self.nPoint = 3000
        self.obj_list = list(self.obj_pc_dict.keys())

        # The MANO model is only evaluated, never trained, hence no_grad.
        with torch.no_grad():
            self.rh_mano = mano.load(model_path='./models/mano/MANO_RIGHT.pkl',
                                     model_type='mano',
                                     use_pca=False,
                                     num_pca_comps=45,
                                     batch_size=1,
                                     flat_hand_mean=True)
        # [1538, 3] int32 face triangle indexes.
        self.hand_faces = self.rh_mano.faces.astype(np.int32).reshape(-1, 3)
Code example #5
0
    def __init__(self, cfg, inference=False, evaluate=False):
        """Build the GrabNet trainer: logging, device selection, data,
        MANO hand model, CoarseNet/RefineNet, optimizers and loss weights.

        Args:
            cfg: configuration object (uses work_dir, expr_ID, dataset_dir,
                 rhm_path, batch_size, base_lr, reg_coef, cuda_id,
                 use_multigpu, seed, try_num, best_cnet, best_rnet,
                 vpe_path, c_weights_path).
            inference: forwarded to load_data -- presumably switches data
                       loading to inference mode; TODO confirm.
            evaluate: accepted but not used in this constructor body.
        """

        self.dtype = torch.float32

        torch.manual_seed(cfg.seed)

        starttime = datetime.now().replace(microsecond=0)
        makepath(cfg.work_dir, isfile=False)
        # File logger named after the experiment ID; `logger` is its .info.
        logger = makelogger(
            makepath(os.path.join(cfg.work_dir, '%s.log' % (cfg.expr_ID)),
                     isfile=True)).info
        self.logger = logger

        summary_logdir = os.path.join(cfg.work_dir, 'summaries')
        self.swriter = SummaryWriter(log_dir=summary_logdir)
        logger('[%s] - Started training GrabNet, experiment code %s' %
               (cfg.expr_ID, starttime))
        logger('tensorboard --logdir=%s' % summary_logdir)
        logger('Torch Version: %s\n' % torch.__version__)
        logger('Base dataset_dir is %s' % cfg.dataset_dir)

        # shutil.copy2(os.path.basename(sys.argv[0]), cfg.work_dir)

        use_cuda = torch.cuda.is_available()
        if use_cuda:
            torch.cuda.empty_cache()
        self.device = torch.device(
            "cuda:%d" % cfg.cuda_id if torch.cuda.is_available() else "cpu")

        gpu_brand = torch.cuda.get_device_name(
            cfg.cuda_id) if use_cuda else None
        # gpu_count scales the per-GPU batch size for the MANO model below.
        gpu_count = torch.cuda.device_count() if cfg.use_multigpu else 1
        if use_cuda:
            logger('Using %d CUDA cores [%s] for training!' %
                   (gpu_count, gpu_brand))

        self.data_info = {}
        self.load_data(cfg, inference)

        # Right-hand MANO model used during training; wrapped in no_grad
        # since its parameters are not optimized here.
        with torch.no_grad():
            self.rhm_train = mano.load(model_path=cfg.rhm_path,
                                       model_type='mano',
                                       num_pca_comps=45,
                                       batch_size=cfg.batch_size // gpu_count,
                                       flat_hand_mean=True).to(self.device)

        self.coarse_net = CoarseNet().to(self.device)
        self.refine_net = RefineNet().to(self.device)

        self.LossL1 = torch.nn.L1Loss(reduction='mean')
        self.LossL2 = torch.nn.MSELoss(reduction='mean')

        if cfg.use_multigpu:
            self.coarse_net = nn.DataParallel(self.coarse_net)
            self.refine_net = nn.DataParallel(self.refine_net)
            logger("Training on Multiple GPU's")

        # Collect parameter tensors for the two optimizers below.
        vars_cnet = [var[1] for var in self.coarse_net.named_parameters()]
        vars_rnet = [var[1] for var in self.refine_net.named_parameters()]

        cnet_n_params = sum(p.numel() for p in vars_cnet if p.requires_grad)
        rnet_n_params = sum(p.numel() for p in vars_rnet if p.requires_grad)
        logger('Total Trainable Parameters for CoarseNet is %2.2f M.' %
               ((cnet_n_params) * 1e-6))
        logger('Total Trainable Parameters for RefineNet is %2.2f M.' %
               ((rnet_n_params) * 1e-6))

        self.optimizer_cnet = optim.Adam(vars_cnet,
                                         lr=cfg.base_lr,
                                         weight_decay=cfg.reg_coef)
        self.optimizer_rnet = optim.Adam(vars_rnet,
                                         lr=cfg.base_lr,
                                         weight_decay=cfg.reg_coef)

        # Best validation losses so far; start at infinity.
        self.best_loss_cnet = np.inf
        self.best_loss_rnet = np.inf

        self.try_num = cfg.try_num
        self.epochs_completed = 0
        self.cfg = cfg
        self.coarse_net.cfg = cfg

        # Optionally restore checkpoints; strict=False so partially matching
        # state dicts still load.
        if cfg.best_cnet is not None:
            self._get_cnet_model().load_state_dict(torch.load(
                cfg.best_cnet, map_location=self.device),
                                                   strict=False)
            logger('Restored CoarseNet model from %s' % cfg.best_cnet)
        if cfg.best_rnet is not None:
            self._get_rnet_model().load_state_dict(torch.load(
                cfg.best_rnet, map_location=self.device),
                                                   strict=False)
            logger('Restored RefineNet model from %s' % cfg.best_rnet)

        # weights for contact, penetration and distance losses
        self.vpe = torch.from_numpy(np.load(cfg.vpe_path)).to(self.device).to(
            torch.long)
        # Hand faces tiled to the batch size: [batch, n_faces, 3].
        rh_f = torch.from_numpy(self.rhm_train.faces.astype(np.int32)).view(
            1, -1, 3)
        self.rh_f = rh_f.repeat(self.cfg.batch_size, 1,
                                1).to(self.device).to(torch.long)

        # Per-vertex contact weights loaded from disk; v_weights2 softens
        # them with a 1/2.5 power.
        v_weights = torch.from_numpy(np.load(cfg.c_weights_path)).to(
            torch.float32).to(self.device)
        v_weights2 = torch.pow(v_weights, 1.0 / 2.5)
        self.refine_net.v_weights = v_weights
        self.refine_net.v_weights2 = v_weights2
        self.refine_net.rhm_train = self.rhm_train

        self.v_weights = v_weights
        self.v_weights2 = v_weights2

        # NOTE(review): self.n_obj_verts is not set in this constructor --
        # presumably assigned inside load_data; verify.
        self.w_dist = torch.ones([self.cfg.batch_size,
                                  self.n_obj_verts]).to(self.device)
        # Boolean mask of vertices considered to be "in contact".
        self.contact_v = v_weights > 0.8
Code example #6
0
    # load pre-trained model
    # Restore the affordance network weights (the checkpoint stores them
    # under the 'network' key); load on CPU first, then move to `device`.
    checkpoint_affordance = torch.load(
        args.affordance_model_path,
        map_location=torch.device('cpu'))['network']
    affordance_model.load_state_dict(checkpoint_affordance)
    affordance_model = affordance_model.to(device)
    # Same restore pattern for the contact-map (cmap) network.
    checkpoint_cmap = torch.load(args.cmap_model_path,
                                 map_location=torch.device('cpu'))['network']
    cmap_model.load_state_dict(checkpoint_cmap)
    cmap_model = cmap_model.to(device)

    # dataset
    # One sample per batch, sequential order, single worker.
    dataset = HO3D_diversity()
    dataloader = DataLoader(dataset=dataset,
                            batch_size=1,
                            shuffle=False,
                            num_workers=1)
    # mano hand model
    # Right-hand MANO with 45 PCA pose components; no_grad because the
    # hand model is only evaluated, never trained, here.
    with torch.no_grad():
        rh_mano = mano.load(model_path='./models/mano/MANO_RIGHT.pkl',
                            model_type='mano',
                            use_pca=True,
                            num_pca_comps=45,
                            batch_size=1,
                            flat_hand_mean=True).to(device)
    rh_faces = torch.from_numpy(rh_mano.faces.astype(np.int32)).view(
        1, -1, 3).to(device)  # [1, 1538, 3], face indexes

    main(args, affordance_model, cmap_model, dataloader, device, rh_mano,
         rh_faces)
Code example #7
0
File: grab_pkl.py  Project: pgrady3/GrabNet
def grab_new_objs(grabnet, pkl_path, rot=True, n_samples=5, scale=1.):
    """Fit GrabNet grasps to pickled hand-object samples and save results.

    Loads samples from `pkl_path`, centers each object at its centroid,
    runs CoarseNet/RefineNet via vis_results, un-centers the outputs, and
    dumps everything to 'fitted_grabnet.pkl'.

    Note: reads the module-level `args` (args.vis, args.num).
    """
    grabnet.coarse_net.eval()
    grabnet.refine_net.eval()

    # Output-parameterization MANO model: 45 PCA comps, flat hand mean.
    rh_model = mano.load(model_path=grabnet.cfg.rhm_path,
                         model_type='mano',
                         num_pca_comps=45,
                         batch_size=n_samples,
                         flat_hand_mean=True).to(grabnet.device)

    # Input-parameterization MANO model: 15 PCA comps, non-flat mean.
    rh_model_pkl = mano.load(model_path=grabnet.cfg.rhm_path,
                             model_type='mano',
                             num_pca_comps=15,
                             batch_size=n_samples,
                             flat_hand_mean=False).to(grabnet.device)

    grabnet.refine_net.rhm_train = rh_model

    grabnet.logger(f'################# \n'
                   f'Colors Guide:'
                   f'                   \n'
                   f'Gray  --->  GrabNet generated grasp\n')

    bps = bps_torch(custom_basis=grabnet.bps)

    all_samples = pickle.load(open(pkl_path, 'rb'))

    # When visualizing, shuffle so the args.num slice below is a random subset.
    if args.vis:
        print('Shuffling!!!')
        random.shuffle(all_samples)

    all_samples = all_samples[:args.num]
    all_data = []

    for idx, new_obj in enumerate(tqdm(all_samples)):
        print('idx', idx)
        ho = new_obj['ho_aug']

        # Center the object (and hand) at the object centroid before
        # inference; this is undone after vis_results below.
        obj_centroid = ho.obj_verts.mean(0)
        ho.obj_verts = np.array(ho.obj_verts) - obj_centroid
        ho.hand_verts = np.array(ho.hand_verts) - obj_centroid
        ho.hand_mTc = np.array(ho.hand_mTc)
        ho.hand_mTc[:3, 3] = ho.hand_mTc[:3, 3] - obj_centroid

        # Rotation deliberately disabled: the degree range multiplier is zero.
        rand_rotdeg = np.random.random([n_samples, 3]) * np.array([0, 0, 0])

        rand_rotmat = euler(rand_rotdeg)
        dorig = {
            'bps_object': [],
            'verts_object': [],
            'mesh_object': [],
            'rotmat': []
        }

        for samples in range(n_samples):

            verts_obj, mesh_obj, rotmat = load_obj_verts(ho,
                                                         rand_rotmat[samples],
                                                         rndrotate=rot,
                                                         scale=scale)

            # BPS distance features conditioning the grasp generator.
            bps_object = bps.encode(verts_obj, feature_type='dists')['dists']

            dorig['bps_object'].append(bps_object.to(grabnet.device))
            dorig['verts_object'].append(
                torch.from_numpy(verts_obj.astype(np.float32)).unsqueeze(0))
            dorig['mesh_object'].append(mesh_obj)
            dorig['rotmat'].append(rotmat)
            obj_name = 'test1'

        # Stack per-sample tensors into batch tensors.
        dorig['bps_object'] = torch.cat(dorig['bps_object'])
        dorig['verts_object'] = torch.cat(dorig['verts_object'])

        save_dir = os.path.join(grabnet.cfg.work_dir, 'grab_new_objects')
        # grabnet.logger(f'#################\n'
        #                       f'                   \n'
        #                       f'Showing results for the {obj_name.upper()}'
        #                       f'                      \n')

        verts_out, joints_out = vis_results(ho,
                                            dorig=dorig,
                                            coarse_net=grabnet.coarse_net,
                                            refine_net=grabnet.refine_net,
                                            rh_model=rh_model,
                                            save=False,
                                            save_dir=save_dir,
                                            rh_model_pkl=rh_model_pkl,
                                            vis=args.vis)

        # Undo the centering applied above.
        ho.obj_verts = np.array(ho.obj_verts) + obj_centroid
        ho.hand_verts = np.array(ho.hand_verts) + obj_centroid
        ho.hand_mTc = np.array(ho.hand_mTc)
        ho.hand_mTc[:3, 3] = ho.hand_mTc[:3, 3] + obj_centroid

        # Shift the predicted hand vertices/joints back to the original frame.
        verts_out = np.array(
            verts_out.detach().squeeze().numpy()) + obj_centroid
        joints_out = np.array(
            joints_out.detach().squeeze().numpy()) + obj_centroid

        # NOTE(review): new_ho is built but never stored or returned --
        # possibly dead code; confirm before removing.
        new_ho = hand_object.HandObject()
        new_ho.load_from_verts(verts_out, new_obj['ho_gt'].obj_faces,
                               new_obj['ho_gt'].obj_verts)
        all_data.append({
            'gt_ho': new_obj['ho_gt'],
            'in_ho': new_obj['ho_aug'],
            'out_verts': verts_out,
            'out_joints': joints_out
        })

    out_file = 'fitted_grabnet.pkl'
    print('Saving to {}. Len {}'.format(out_file, len(all_data)))
    pickle.dump(all_data, open(out_file, 'wb'))
Code example #8
0
def inference(grabnet):
    """Visualize reconstructed and generated grasps for each object in the
    test split of the GrabNet dataset.

    Args:
        grabnet: trainer object providing coarse_net, refine_net, cfg,
                 device and logger.
    """
    grabnet.coarse_net.eval()
    grabnet.refine_net.eval()

    ds_name = 'test'
    mesh_base = '/ps/scratch/grab/data/object_meshes/contact_meshes'
    ds_test = LoadData(dataset_dir=grabnet.cfg.dataset_dir, ds_name=ds_name)
    n_samples = 5  # frames visualized per object

    # Right-hand MANO model batched to n_samples so all sampled hands are
    # decoded in one forward pass.
    rh_model = mano.load(model_path=grabnet.cfg.rhm_path,
                         model_type='mano',
                         num_pca_comps=45,
                         batch_size=n_samples,
                         flat_hand_mean=True).to(grabnet.device)

    grabnet.refine_net.rhm_train = rh_model
    test_obj_names = np.unique(ds_test.frame_objs)

    # Color legend for the visualization (fixed typo: "Refinent" -> "RefineNet").
    grabnet.logger(f'################# \n'
                   f'Colors Guide:'
                   f'                   \n'
                   f'Red   --->  Reconstructed grasp - CoarseNet\n'
                   f'Green --->  Reconstructed grasp - RefineNet\n'
                   f'Blue  --->  Ground Truth Grasp\n'
                   f'Pink  --->  Generated grasp - CoarseNet\n'
                   f'Gray  --->  Generated grasp - RefineNet\n')

    for obj in test_obj_names:
        # Pick n_samples random frames of this object.
        obj_frames = np.where(ds_test.frame_objs == obj)[0]
        rnd_frames = np.random.choice(obj_frames.shape[0], n_samples)
        obj_data = ds_test[obj_frames[rnd_frames]]
        frame_data = {
            k: obj_data[k].to(grabnet.device)
            for k in obj_data.keys()
        }

        obj_meshes = []
        rotmats = []
        for frame in range(n_samples):
            # Stored rotation is transposed before applying to the mesh --
            # presumably row-major storage vs column-vector convention;
            # TODO confirm.
            rot_mat = frame_data['root_orient_obj_rotmat'][frame].cpu().numpy(
            ).reshape(3, 3).T
            transl = frame_data['trans_obj'][frame].cpu().numpy()

            # Pose the canonical object mesh with this frame's rotation
            # and translation.
            obj_mesh = Mesh(filename=os.path.join(mesh_base, obj + '.ply'),
                            vc=name_to_rgb['yellow'])
            obj_mesh.rotate_vertices(rot_mat)
            obj_mesh.v += transl

            obj_meshes.append(obj_mesh)
            rotmats.append(rot_mat)

        frame_data['mesh_object'] = obj_meshes
        frame_data['rotmat'] = rotmats

        save_dir = os.path.join(grabnet.cfg.work_dir, 'test_grasp_results')
        grabnet.logger(f'#################\n'
                       f'                   \n'
                       f'Showing results for the {obj.upper()}'
                       f'                      \n')
        vis_results(dorig=frame_data,
                    coarse_net=grabnet.coarse_net,
                    refine_net=grabnet.refine_net,
                    rh_model=rh_model,
                    show_rec=True,
                    show_gen=True,
                    save=False,
                    save_dir=save_dir)