Example #1
    def save_output(self, names, pose_, betas_, trans_, corr_, vcs, save_name,
                    epoch, it):
        from psbody.mesh import Mesh
        from lib.smpl_paths import SmplPaths

        sp = SmplPaths(gender='male')
        smpl = sp.get_smpl()
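        # Iterate over the batch: for each item write the registered SMPL mesh, the raw correspondences, and the SMPL parameters.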
        for nam, p, b, t, c, vc in zip(names, pose_, betas_, trans_, corr_,
                                       vcs):
            name = split(nam)[1]
            smpl.pose[:] = p
            smpl.betas[:10] = b
            smpl.trans[:] = t

            # save registration
            Mesh(smpl.r, smpl.f).write_ply(
                join(self.exp_path, save_name + '_ep_{}'.format(epoch),
                     name + '_{}_reg.ply'.format(it)))

            # save raw correspondences
            temp = Mesh(c, [])
            temp.set_vertex_colors(vc)
            temp.write_ply(
                join(self.exp_path, save_name + '_ep_{}'.format(epoch),
                     name + '_{}_craw.ply'.format(it)))

            # save SMPL params
            with open(
                    join(self.exp_path, save_name + '_ep_{}'.format(epoch),
                         name + '_{}_reg.pkl'.format(it)), 'wb') as f:
                pkl.dump({'pose': p, 'betas': b, 'trans': t}, f)
Example #2
def get_tpose_smpl():
    sp = SmplPaths(gender='neutral')
    smpl = sp.get_smpl()
    smpl.trans[:] = 0
    smpl.pose[:] = 0

    return smpl
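
A minimal usage sketch for the helper above, assuming psbody.mesh is available as in Example 1; the output filename is a placeholder.

from psbody.mesh import Mesh

tpose = get_tpose_smpl()
# tpose.r holds the template vertices, tpose.f the faces
Mesh(tpose.r, tpose.f).write_ply('tpose_template.ply')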
Example #3
    def __init__(self,
                 mode,
                 batch_sz,
                 data_path=DATA_PATH,
                 split_file='assets/data_split_01.pkl',
                 num_workers=12,
                 augment=False,
                 naked=False):
        self.mode = mode
        self.path = data_path
        with open(split_file, "rb") as f:
            self.split = pkl.load(f)

        self.data = self.split[mode]
        self.batch_size = batch_sz
        self.num_workers = num_workers
        self.augment = augment
        self.naked = naked
        sp = SmplPaths(gender='male')
        self.ref_smpl = sp.get_smpl()
        self.vt, self.ft = sp.get_vt_ft()

        # Load smpl part labels
        with open('assets/smpl_parts_dense.pkl', 'rb') as f:
            dat = pkl.load(f, encoding='latin-1')
        self.smpl_parts = np.zeros((6890, 1))
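        # Assign each of the 6890 SMPL vertices the index of the part it belongs to.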
        for n, k in enumerate(dat):
            self.smpl_parts[dat[k]] = n
Example #4
def get_prior(gender='male', precomputed=False):
    if precomputed:
        prior = Prior(sm=None)
        return prior['Generic']
    else:
        from lib.smpl_paths import SmplPaths

        dp = SmplPaths(gender=gender)
        if gender == 'neutral':
            dp_prior = SmplPaths(gender='male')
        else:
            dp_prior = dp

        prior = Prior(dp_prior.get_smpl())
        return prior['Generic']
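
A short sketch of how the returned prior is used elsewhere in these examples (see Example 6): its mean initialises the body pose while the global rotation stays at zero.

import torch

prior = get_prior(gender='male')
pose_init = torch.zeros((1, 72))  # a batch of one 72-D SMPL pose vector
pose_init[:, 3:] = prior.mean     # the first three entries (global rotation) remain zero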
Example #5
def fit_SMPLD(scans, smpl_pkl=None, gender='male', save_path=None, display=False):
    # Get SMPL faces
    sp = SmplPaths(gender=gender)
    smpl_faces = sp.get_faces()
    th_faces = torch.tensor(smpl_faces.astype('float32'), dtype=torch.long).cuda()

    # Batch size
    batch_sz = len(scans)

    # Init SMPL
    if smpl_pkl is None or smpl_pkl[0] is None:
        print('SMPL not specified, fitting SMPL now')
        pose, betas, trans = fit_SMPL(scans, None, gender, save_path, display)
    else:
        pose, betas, trans = [], [], []
        for spkl in smpl_pkl:
            smpl_dict = pkl.load(open(spkl, 'rb'), encoding='latin-1')
            p, b, t = smpl_dict['pose'], smpl_dict['betas'], smpl_dict['trans']
            pose.append(p)
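            # betas saved with only 10 coefficients are zero-padded to the 300 coefficients expected by th_batch_SMPL below.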
            if len(b) == 10:
                temp = np.zeros((300,))
                temp[:10] = b
                b = temp.astype('float32')
            betas.append(b)
            trans.append(t)
        pose, betas, trans = np.array(pose), np.array(betas), np.array(trans)

    betas, pose, trans = torch.tensor(betas), torch.tensor(pose), torch.tensor(trans)
    smpl = th_batch_SMPL(batch_sz, betas, pose, trans, faces=th_faces).cuda()

    verts, _, _, _ = smpl()
    init_smpl_meshes = [tm.from_tensors(vertices=v.clone().detach(),
                                        faces=smpl.faces) for v in verts]

    # Load scans
    th_scan_meshes = []
    for scan in scans:
        th_scan = tm.from_obj(scan)
        if save_path is not None:
            th_scan.save_mesh(join(save_path, split(scan)[1]))
        th_scan.vertices = th_scan.vertices.cuda()
        th_scan.faces = th_scan.faces.cuda()
        th_scan.vertices.requires_grad = False
        th_scan_meshes.append(th_scan)

    # Optimize
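    # The trailing arguments (5, 10) are presumably the iteration count and the steps per iteration for the offset optimisation.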
    optimize_offsets(th_scan_meshes, smpl, init_smpl_meshes, 5, 10)
    print('Done')

    verts, _, _, _ = smpl()
    th_smpl_meshes = [tm.from_tensors(vertices=v,
                                      faces=smpl.faces) for v in verts]

    if save_path is not None:
        if not exists(save_path):
            os.makedirs(save_path)

        names = [split(s)[1] for s in scans]

        # Save meshes
        save_meshes(th_smpl_meshes, [join(save_path, n.replace('.obj', '_smpld.obj')) for n in names])
        save_meshes(th_scan_meshes, [join(save_path, n) for n in names])
        # Save params
        for p, b, t, d, n in zip(smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(),
                                 smpl.trans.cpu().detach().numpy(), smpl.offsets.cpu().detach().numpy(), names):
            smpl_dict = {'pose': p, 'betas': b, 'trans': t, 'offsets': d}
            pkl.dump(smpl_dict, open(join(save_path, n.replace('.obj', '_smpld.pkl')), 'wb'))

    return smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(), \
           smpl.trans.cpu().detach().numpy(), smpl.offsets.cpu().detach().numpy()
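
A hypothetical call to fit_SMPLD above; the scan paths and output directory are placeholders, and a CUDA device is assumed because the function moves its tensors with .cuda().

scans = ['data/person_00.obj', 'data/person_01.obj']  # hypothetical scan paths
pose, betas, trans, offsets = fit_SMPLD(scans,
                                        smpl_pkl=None,  # no precomputed SMPL fit, so fit_SMPL runs first
                                        gender='male',
                                        save_path='out/smpld')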
Example #6
def fit_SMPLX(scans,
              pose_files=None,
              gender='male',
              save_path=None,
              display=None):
    """
    :param scans: list of scan paths
    :param pose_files: optional list of OpenPose-style 3D keypoint JSON files, one per scan
    :param gender: SMPL model gender
    :param save_path: directory where fitted meshes and SMPL parameters are written (optional)
    :param display: if not None, intermediate fits are displayed
    :return: fitted pose, betas and trans as numpy arrays
    """
    # Get SMPL faces
    sp = SmplPaths(gender=gender)
    smpl_faces = sp.get_faces()
    th_faces = torch.tensor(smpl_faces.astype('float32'),
                            dtype=torch.long).cuda()

    # Batch size
    batch_sz = len(scans)

    # Set optimization hyperparameters
    iterations, pose_iterations, steps_per_iter, pose_steps_per_iter = 3, 2, 30, 30

    if False:
        """Test by loading GT SMPL params"""
        betas, pose, trans = torch.tensor(
            GT_SMPL['betas'].astype('float32')).unsqueeze(0), torch.tensor(
                GT_SMPL['pose'].astype('float32')).unsqueeze(0), torch.zeros(
                    (batch_sz, 3))
    else:
        prior = get_prior(gender=gender)
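        # Initialise the 72-D pose with the prior mean for every joint except the global rotation (first three entries).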
        pose_init = torch.zeros((batch_sz, 72))
        pose_init[:, 3:] = prior.mean
        betas, pose, trans = torch.zeros(
            (batch_sz, 300)), pose_init, torch.zeros((batch_sz, 3))

    # Init SMPL; the pose is initialised with the mean SMPL pose, as in ch.registration
    smpl = th_batch_SMPL(batch_sz, betas, pose, trans, faces=th_faces).cuda()

    # Load scans. (Centering is disabled below; if you re-enable it, move the registered SMPL
    # and the 3D joints/landmarks by the same offset afterwards.)
    th_scan_meshes, centers = [], []
    for scan in scans:
        print('scan path ...', scan)
        th_scan = tm.from_obj(scan)
        # cent = th_scan.vertices.mean(axis=0)
        # centers.append(cent)
        # th_scan.vertices -= cent
        th_scan.vertices = th_scan.vertices.cuda()
        th_scan.faces = th_scan.faces.cuda()
        th_scan.vertices.requires_grad = False
        th_scan.cuda()
        th_scan_meshes.append(th_scan)

    # Load pose information if pose file is given
    # Bharat: Shouldn't we structure th_pose_3d as [key][batch, ...] as opposed to current [batch][key]? See batch_get_pose_obj() in body_objectives.py
    th_pose_3d = None
    if pose_files is not None:
        th_no_right_hand_visible, th_no_left_hand_visible, th_pose_3d = [], [], []
        for pose_file in pose_files:
            with open(pose_file) as f:
                pose_3d = json.load(f)
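                # A hand counts as not visible when even its best keypoint confidence is below HAND_VISIBLE;
                # these flags presumably let get_prior_weight relax the corresponding hand pose prior.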
                th_no_right_hand_visible.append(
                    np.max(
                        np.array(pose_3d['hand_right_keypoints_3d']).reshape(
                            -1, 4)[:, 3]) < HAND_VISIBLE)
                th_no_left_hand_visible.append(
                    np.max(
                        np.array(pose_3d['hand_left_keypoints_3d']).reshape(
                            -1, 4)[:, 3]) < HAND_VISIBLE)

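                # Convert the OpenPose-style keypoint lists into (N, 4) tensors of (x, y, z, confidence).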
                pose_3d['pose_keypoints_3d'] = torch.from_numpy(
                    np.array(pose_3d['pose_keypoints_3d']).astype(
                        np.float32).reshape(-1, 4))
                pose_3d['face_keypoints_3d'] = torch.from_numpy(
                    np.array(pose_3d['face_keypoints_3d']).astype(
                        np.float32).reshape(-1, 4))
                pose_3d['hand_right_keypoints_3d'] = torch.from_numpy(
                    np.array(pose_3d['hand_right_keypoints_3d']).astype(
                        np.float32).reshape(-1, 4))
                pose_3d['hand_left_keypoints_3d'] = torch.from_numpy(
                    np.array(pose_3d['hand_left_keypoints_3d']).astype(
                        np.float32).reshape(-1, 4))
            th_pose_3d.append(pose_3d)

        prior_weight = get_prior_weight(th_no_right_hand_visible,
                                        th_no_left_hand_visible).cuda()

        # Optimize pose first
        optimize_pose_only(th_scan_meshes,
                           smpl,
                           pose_iterations,
                           pose_steps_per_iter,
                           th_pose_3d,
                           prior_weight,
                           display=None if display is None else 0)

    # Optimize pose and shape
    optimize_pose_shape(th_scan_meshes,
                        smpl,
                        iterations,
                        steps_per_iter,
                        th_pose_3d,
                        display=None if display is None else 0)

    verts, _, _, _ = smpl()
    th_smpl_meshes = [
        tm.from_tensors(vertices=v, faces=smpl.faces) for v in verts
    ]

    if save_path is not None:
        if not exists(save_path):
            os.makedirs(save_path)

        names = [split(s)[1] for s in scans]

        # Save meshes
        save_meshes(
            th_smpl_meshes,
            [join(save_path, n.replace('.obj', '_smpl.obj')) for n in names])
        save_meshes(th_scan_meshes, [join(save_path, n) for n in names])

        # Save params
        for p, b, t, n in zip(smpl.pose.cpu().detach().numpy(),
                              smpl.betas.cpu().detach().numpy(),
                              smpl.trans.cpu().detach().numpy(), names):
            smpl_dict = {'pose': p, 'betas': b, 'trans': t}
            pkl.dump(
                smpl_dict,
                open(join(save_path, n.replace('.obj', '_smpl.pkl')), 'wb'))

    return smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(), \
           smpl.trans.cpu().detach().numpy()
Example #7
def fit_SMPLD(scans, smpl_pkl, gender='male', save_path=None, scale_file=None):
    # Get SMPL faces
    sp = SmplPaths(gender=gender)
    smpl_faces = sp.get_faces()
    th_faces = torch.tensor(smpl_faces.astype('float32'), dtype=torch.long).cuda()

    # Batch size
    batch_sz = len(scans)

    # Init SMPL
    pose, betas, trans = [], [], []
    for spkl in smpl_pkl:
        smpl_dict = pkl.load(open(spkl, 'rb'))
        p, b, t = smpl_dict['pose'], smpl_dict['betas'], smpl_dict['trans']
        pose.append(p)
        if len(b) == 10:
            temp = np.zeros((300,))
            temp[:10] = b
            b = temp.astype('float32')
        betas.append(b)
        trans.append(t)
    pose, betas, trans = np.array(pose), np.array(betas), np.array(trans)

    betas, pose, trans = torch.tensor(betas), torch.tensor(pose), torch.tensor(trans)
    smpl = th_batch_SMPL(batch_sz, betas, pose, trans, faces=th_faces).cuda()

    verts, _, _, _ = smpl()
    init_smpl_meshes = [tm.from_tensors(vertices=v.clone().detach(),
                                        faces=smpl.faces) for v in verts]

    # Load scans
    th_scan_meshes = []
    for scan in scans:
        print('scan path ...', scan)
        temp = Mesh(filename=scan)
        th_scan = tm.from_tensors(torch.tensor(temp.v.astype('float32'), requires_grad=False, device=DEVICE),
                                  torch.tensor(temp.f.astype('int32'), requires_grad=False, device=DEVICE).long())
        th_scan_meshes.append(th_scan)

    if scale_file is not None:
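        # Each scale file presumably stores (scale, translation): the scan is translated by dat[1] and then scaled by dat[0].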
        for n, sc in enumerate(scale_file):
            dat = np.load(sc, allow_pickle=True)
            th_scan_meshes[n].vertices += torch.tensor(dat[1]).to(DEVICE)
            th_scan_meshes[n].vertices *= torch.tensor(dat[0]).to(DEVICE)

    # Optimize
    optimize_offsets(th_scan_meshes, smpl, init_smpl_meshes, 5, 10)
    print('Done')

    verts, _, _, _ = smpl()
    th_smpl_meshes = [tm.from_tensors(vertices=v,
                                      faces=smpl.faces) for v in verts]

    if save_path is not None:
        if not exists(save_path):
            os.makedirs(save_path)

        names = [split(s)[1] for s in scans]

        # Save meshes
        save_meshes(th_smpl_meshes, [join(save_path, n.replace('.ply', '_smpld.obj')) for n in names])
        save_meshes(th_scan_meshes, [join(save_path, n) for n in names])
        # Save params
        for p, b, t, d, n in zip(smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(),
                                 smpl.trans.cpu().detach().numpy(), smpl.offsets.cpu().detach().numpy(), names):
            smpl_dict = {'pose': p, 'betas': b, 'trans': t, 'offsets': d}
            pkl.dump(smpl_dict, open(join(save_path, n.replace('.ply', '_smpld.pkl')), 'wb'))

    return smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(), \
           smpl.trans.cpu().detach().numpy(), smpl.offsets.cpu().detach().numpy()
Example #8
def fit_SMPL(scans, scan_labels, gender='male', save_path=None, scale_file=None, display=None):
    """
    :param scans: list of scan paths
    :param scan_labels: list of per-scan part-label files (.npy) with one label per scan vertex
    :param gender: SMPL model gender
    :param save_path: directory where fitted meshes and SMPL parameters are written (optional)
    :param scale_file: optional list of per-scan files holding a scale and a translation
    :param display: if not None, intermediate fits are displayed
    :return: fitted pose, betas and trans as numpy arrays
    """
    # Get SMPL faces
    sp = SmplPaths(gender=gender)
    smpl_faces = sp.get_faces()
    th_faces = torch.tensor(smpl_faces.astype('float32'), dtype=torch.long).to(DEVICE)

    # Load SMPL parts
    part_labels = pkl.load(open('/BS/bharat-3/work/IPNet/assets/smpl_parts_dense.pkl', 'rb'))
    labels = np.zeros((6890,), dtype='int32')
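    # Assign each of the 6890 SMPL vertices the index of the body part it belongs to.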
    for n, k in enumerate(part_labels):
        labels[part_labels[k]] = n
    labels = torch.tensor(labels).unsqueeze(0).to(DEVICE)

    # Load scan parts
    scan_part_labels = []
    for sc_l in scan_labels:
        temp = torch.tensor(np.load(sc_l).astype('int32')).to(DEVICE)
        scan_part_labels.append(temp)

    # Batch size
    batch_sz = len(scans)

    # Set optimization hyperparameters
    iterations, pose_iterations, steps_per_iter, pose_steps_per_iter = 3, 2, 30, 30

    prior = get_prior(gender=gender, precomputed=True)
    pose_init = torch.zeros((batch_sz, 72))
    pose_init[:, 3:] = prior.mean
    betas, pose, trans = torch.zeros((batch_sz, 300)), pose_init, torch.zeros((batch_sz, 3))

    # Init SMPL; the pose is initialised with the mean SMPL pose, as in ch.registration
    smpl = th_batch_SMPL(batch_sz, betas, pose, trans, faces=th_faces).to(DEVICE)
    smpl_part_labels = torch.cat([labels] * batch_sz, axis=0)

    th_scan_meshes, centers = [], []
    for scan in scans:
        print('scan path ...', scan)
        temp = Mesh(filename=scan)
        th_scan = tm.from_tensors(torch.tensor(temp.v.astype('float32'), requires_grad=False, device=DEVICE),
                                  torch.tensor(temp.f.astype('int32'), requires_grad=False, device=DEVICE).long())
        th_scan_meshes.append(th_scan)

    if scale_file is not None:
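        # Each scale file presumably stores (scale, translation): the scan is translated by dat[1] and then scaled by dat[0].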
        for n, sc in enumerate(scale_file):
            dat = np.load(sc, allow_pickle=True)
            th_scan_meshes[n].vertices += torch.tensor(dat[1]).to(DEVICE)
            th_scan_meshes[n].vertices *= torch.tensor(dat[0]).to(DEVICE)

    # Optimize pose first
    optimize_pose_only(th_scan_meshes, smpl, pose_iterations, pose_steps_per_iter, scan_part_labels, smpl_part_labels,
                       display=None if display is None else 0)

    # Optimize pose and shape
    optimize_pose_shape(th_scan_meshes, smpl, iterations, steps_per_iter, scan_part_labels, smpl_part_labels,
                        display=None if display is None else 0)

    verts, _, _, _ = smpl()
    th_smpl_meshes = [tm.from_tensors(vertices=v, faces=smpl.faces) for v in verts]

    if save_path is not None:
        if not exists(save_path):
            os.makedirs(save_path)

        names = [split(s)[1] for s in scans]

        # Save meshes
        save_meshes(th_smpl_meshes, [join(save_path, n.replace('.ply', '_smpl.obj')) for n in names])
        save_meshes(th_scan_meshes, [join(save_path, n) for n in names])

        # Save params
        for p, b, t, n in zip(smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(),
                              smpl.trans.cpu().detach().numpy(), names):
            smpl_dict = {'pose': p, 'betas': b, 'trans': t}
            pkl.dump(smpl_dict, open(join(save_path, n.replace('.ply', '_smpl.pkl')), 'wb'))

    return smpl.pose.cpu().detach().numpy(), smpl.betas.cpu().detach().numpy(), \
           smpl.trans.cpu().detach().numpy()
Example #9
    def __init__(self,
                 model,
                 device,
                 train_loader,
                 val_loader,
                 exp_name,
                 opt_dict={},
                 optimizer='Adam',
                 checkpoint_number=-1,
                 train_supervised=False):
        """
        :param model: correspondence prediction network
        :param device: cuda or cpu
        :param train_loader: data loader for the training split
        :param val_loader: data loader for the validation split
        :param exp_name: experiment name; outputs are written under experiments/<exp_name>
        :param opt_dict: dict containing optimization-specific parameters
        :param optimizer:
        :param checkpoint_number: load a specific checkpoint, -1 => load latest
        :param train_supervised:
        """
        self.model = model.to(device)
        self.device = device
        self.opt_dict = self.parse_opt_dict(opt_dict)
        self.optimizer_type = optimizer
        self.optimizer = self.init_optimizer(optimizer,
                                             self.model.parameters(),
                                             learning_rate=0.001)

        self.train_data_loader = train_loader
        self.val_data_loader = val_loader
        self.train_supervised = train_supervised
        self.checkpoint_number = checkpoint_number

        # Load vsmpl
        self.vsmpl = VolumetricSMPL(
            '/BS/bharat-2/work/LearntRegistration/test_data/volumetric_smpl_function_64',
            device, 'male')
        sp = SmplPaths(gender='male')
        self.ref_smpl = sp.get_smpl()
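        # Sample NUM_POINTS points on the reference SMPL surface as a fixed template point cloud.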
        self.template_points = torch.tensor(trimesh.Trimesh(
            vertices=self.ref_smpl.r,
            faces=self.ref_smpl.f).sample(NUM_POINTS).astype('float32'),
                                            requires_grad=False).unsqueeze(0)

        self.pose_prior = get_prior('male', precomputed=True)

        # Load smpl part labels
        with open(
                '/BS/bharat-2/work/LearntRegistration/test_data/smpl_parts_dense.pkl',
                'rb') as f:
            dat = pkl.load(f, encoding='latin-1')
        self.smpl_parts = np.zeros((6890, 1))
        for n, k in enumerate(dat):
            self.smpl_parts[dat[k]] = n

        self.exp_path = join(os.path.dirname(__file__),
                             '../experiments/{}'.format(exp_name))
        self.checkpoint_path = join(self.exp_path, 'checkpoints/')
        if not os.path.exists(self.checkpoint_path):
            print(self.checkpoint_path)
            os.makedirs(self.checkpoint_path)
        self.writer = SummaryWriter(join(self.exp_path, 'summary'))
        self.val_min = None