def __init__(self, cfg, logger=None, **params):

        self.cfg = cfg
        self.grab_path = cfg.grab_path
        self.out_path = cfg.out_path
        makepath(self.out_path)

        if logger is None:
            log_dir = os.path.join(self.out_path, 'grab_preprocessing.log')
            self.logger = makelogger(log_dir=log_dir, mode='a').info
        else:
            self.logger = logger
        self.logger('Starting data preprocessing!')

        assert cfg.intent in INTENTS

        self.intent = cfg.intent
        self.logger('intent: %s --> processing %s sequences!' % (self.intent, self.intent))

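        # default object-level splits (used when cfg.splits is None); objects not listed
        # under 'test' or 'val' are presumably routed to 'train' by process_sequences below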
        if cfg.splits is None:
            self.splits = {'test': ['mug', 'wineglass', 'camera', 'binoculars', 'fryingpan', 'toothpaste'],
                           'val': ['apple', 'toothbrush', 'elephant', 'hand'],
                           'train': []}
        else:
            assert isinstance(cfg.splits, dict)
            self.splits = cfg.splits
            
        self.all_seqs = glob.glob(self.grab_path + '/*/*.npz')
        
        # containers to be filled by process_sequences below
        self.selected_seqs = []
        self.obj_based_seqs = {}
        self.sbj_based_seqs = {}
        self.split_seqs = {'test': [],
                           'val': [],
                           'train': []
                           }

        # group, mask, and sort sequences based on objects, subjects, and intents
        self.process_sequences()

        self.logger('Total sequences: %d' % len(self.all_seqs))
        self.logger('Selected sequences: %d' % len(self.selected_seqs))
        self.logger('Number of sequences in each data split: train: %d, test: %d, val: %d'
                    % (len(self.split_seqs['train']), len(self.split_seqs['test']), len(self.split_seqs['val'])))
        self.logger('Number of objects in each data split: train: %d, test: %d, val: %d'
                    % (len(self.splits['train']), len(self.splits['test']), len(self.splits['val'])))

        # process the data
        self.data_preprocessing(cfg)
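
process_sequences itself is not shown in this excerpt. The sketch below is only a plausible illustration of how the containers above could be populated, assuming GRAB-style sequence paths such as <grab_path>/s1/apple_eat_1.npz (a subject folder containing <object>_<intent>_<trial>.npz files); it is not the repository's actual implementation.

def process_sequences(self):
    # Hypothetical sketch: group sequences by object and subject, filter by
    # intent, and assign each sequence to a split based on its object name.
    for sequence in self.all_seqs:
        sbj_id = os.path.basename(os.path.dirname(sequence))    # e.g. 's1'
        seq_name = os.path.basename(sequence)                    # e.g. 'apple_eat_1.npz'
        obj_name = seq_name.split('_')[0]

        # keep only sequences matching the requested intent ('all' keeps everything)
        if self.intent != 'all' and self.intent not in seq_name:
            continue

        self.selected_seqs.append(sequence)
        self.obj_based_seqs.setdefault(obj_name, []).append(sequence)
        self.sbj_based_seqs.setdefault(sbj_id, []).append(sequence)

        # objects listed under 'test'/'val' are held out; everything else goes to 'train'
        if obj_name in self.splits['test']:
            self.split_seqs['test'].append(sequence)
        elif obj_name in self.splits['val']:
            self.split_seqs['val'].append(sequence)
        else:
            self.split_seqs['train'].append(sequence)
            if obj_name not in self.splits['train']:
                self.splits['train'].append(obj_name)
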
Example #2
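
This example relies on helper utilities that are not defined in the snippet. A plausible import preamble, assuming they live in the GRAB repository's tools package (the module paths are a guess and may differ in your checkout):

import os
import glob

import numpy as np
import smplx
from tqdm import tqdm

# assumed locations of the GRAB helpers; adjust to match your repository layout
from tools.utils import makepath, makelogger, parse_npz, params2torch, to_cpu
from tools.meshviewer import Mesh
from tools.objectmodel import ObjectModel
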
def save_grab_vertices(cfg, logger=None, **params):

    grab_path = cfg.grab_path
    out_path = cfg.out_path
    if out_path is None:
        out_path = grab_path
    makepath(out_path)

    if logger is None:
        logger = makelogger(log_dir=os.path.join(out_path,
                                                 'grab_preprocessing.log'),
                            mode='a').info
    logger('Starting to get vertices for GRAB!')

    all_seqs = glob.glob(grab_path + '/*/*.npz')

    logger('Total sequences: %d' % len(all_seqs))

    for sequence in tqdm(all_seqs):

        outfname = makepath(sequence.replace(grab_path, out_path).replace(
            '.npz', '_verts_body.npz'),
                            isfile=True)

        action_name = os.path.basename(sequence)
        if os.path.exists(outfname):
            logger('Results for sequence %s already exist.' % (action_name))
            continue
        else:
            logger('Processing data for sequence %s.' % (action_name))

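        # load the sequence archive; its fields are accessed both as dict keys and as attributes below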
        seq_data = parse_npz(sequence)
        n_comps = seq_data['n_comps']
        gender = seq_data['gender']

        T = seq_data.n_frames

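        # SMPL-X body: build a subject-specific model from the per-subject vertex
        # template and push all T frames of body parameters through it in one batch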
        if cfg.save_body_verts:

            sbj_mesh = os.path.join(grab_path, '..', seq_data.body.vtemp)
            sbj_vtemp = np.array(Mesh(filename=sbj_mesh).vertices)

            sbj_m = smplx.create(model_path=cfg.model_path,
                                 model_type='smplx',
                                 gender=gender,
                                 num_pca_comps=n_comps,
                                 v_template=sbj_vtemp,
                                 batch_size=T)

            sbj_parms = params2torch(seq_data.body.params)
            verts_sbj = to_cpu(sbj_m(**sbj_parms).vertices)
            np.savez_compressed(outfname, verts_body=verts_sbj)

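        # MANO left hand: same idea, using the left-hand vertex template and the
        # sequence's PCA hand-pose parameters (flat_hand_mean=True)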
        if cfg.save_lhand_verts:
            lh_mesh = os.path.join(grab_path, '..', seq_data.lhand.vtemp)
            lh_vtemp = np.array(Mesh(filename=lh_mesh).vertices)

            lh_m = smplx.create(model_path=cfg.model_path,
                                model_type='mano',
                                is_rhand=False,
                                v_template=lh_vtemp,
                                num_pca_comps=n_comps,
                                flat_hand_mean=True,
                                batch_size=T)

            lh_parms = params2torch(seq_data.lhand.params)
            verts_lh = to_cpu(lh_m(**lh_parms).vertices)
            np.savez_compressed(outfname.replace('_verts_body.npz',
                                                 '_verts_lhand.npz'),
                                verts_body=verts_lh)

        if cfg.save_rhand_verts:
            rh_mesh = os.path.join(grab_path, '..', seq_data.rhand.vtemp)
            rh_vtemp = np.array(Mesh(filename=rh_mesh).vertices)

            rh_m = smplx.create(model_path=cfg.model_path,
                                model_type='mano',
                                is_rhand=True,
                                v_template=rh_vtemp,
                                num_pca_comps=n_comps,
                                flat_hand_mean=True,
                                batch_size=T)

            rh_parms = params2torch(seq_data.rhand.params)
            verts_rh = to_cpu(rh_m(**rh_parms).vertices)
            np.savez_compressed(outfname.replace('_verts_body.npz',
                                                 '_verts_rhand.npz'),
                                verts_body=verts_rh)

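        # object: randomly sub-sample n_verts_sample template vertices, then apply
        # the per-frame object pose to get sampled object vertices for every frame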
        if cfg.save_object_verts:

            obj_mesh = os.path.join(grab_path, '..',
                                    seq_data.object.object_mesh)
            obj_vtemp = np.array(Mesh(filename=obj_mesh).vertices)
            sample_id = np.random.choice(obj_vtemp.shape[0],
                                         cfg.n_verts_sample,
                                         replace=False)
            obj_m = ObjectModel(v_template=obj_vtemp[sample_id], batch_size=T)
            obj_parms = params2torch(seq_data.object.params)
            verts_obj = to_cpu(obj_m(**obj_parms).vertices)
            np.savez_compressed(outfname.replace('_verts_body.npz',
                                                 '_verts_object.npz'),
                                verts_object=verts_obj)

        logger('Processing finished')
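
The saved archives mirror the input directory tree, with the suffixes _verts_body.npz, _verts_lhand.npz, _verts_rhand.npz, and _verts_object.npz (note that, as written, the hand archives also store their array under the verts_body key). A quick, purely illustrative check of one output, with a hypothetical path:

import numpy as np

# hypothetical output file; substitute one of your own
verts = np.load('out_path/s1/apple_eat_1_verts_body.npz')['verts_body']
print(verts.shape)  # (n_frames, 10475, 3) for an SMPL-X body mesh
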
Example #3
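
This excerpt begins after grab_path, out_path, model_path, and msg have been defined; in the full script they would typically come from command-line arguments. A hedged stand-in, purely so the snippet below can run:

    import os
    import argparse

    from tools.cfg_parser import Config   # assumed location of the Config helper
    from tools.utils import makelogger    # assumed location of makelogger

    parser = argparse.ArgumentParser(description='GRAB vertex extraction')
    parser.add_argument('--grab-path', required=True, help='path to the downloaded GRAB data')
    parser.add_argument('--out-path', required=True, help='directory to write the extracted vertices')
    parser.add_argument('--model-path', required=True, help='path to the SMPL-X / MANO model files')
    args = parser.parse_args()

    grab_path, out_path, model_path = args.grab_path, args.out_path, args.model_path
    msg = 'Extracting vertices for the GRAB sequences.'
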
    # model_path = 'PATH_TO_DOWNLOADED_MODELS_FROM_SMPLX_WEBSITE/'

    cfg = {
        # if True, compute and save the body vertices in the specified path
        'save_body_verts': True,
        'save_object_verts': True,
        'save_lhand_verts': False,
        'save_rhand_verts': False,

        # number of vertices sampled for each object
        'n_verts_sample': 1024,

        # IO paths
        'grab_path': grab_path,
        'out_path': out_path,

        # body and hand model path
        'model_path': model_path,
    }

    log_dir = os.path.join(out_path, 'grab_processing.log')
    logger = makelogger(log_dir=log_dir, mode='a').info
    logger(msg)

    cwd = os.getcwd()
    default_cfg_path = os.path.join(cwd, '../configs/get_vertices_cfg.yaml')
    cfg = Config(default_cfg_path=default_cfg_path, **cfg)
    cfg.write_cfg(write_path=cfg.out_path + '/get_vertices_cfg.yaml')

    save_grab_vertices(cfg, logger)