コード例 #1
0
def init_smpl(gender, init_pose_path, gar_file_path, template_file_pkl_path,
              gar_type):
    """Build a high-res SMPL body in the garment A-pose with garment offsets.

    Parameters
    ----------
    gender : str
        Selects the male/female model via ``SmplPaths``.
    init_pose_path : str
        Pickle containing a dict with key ``'pose'`` (axis-angle pose vector).
    gar_file_path : str
        Pickle mapping garment type -> ``{'vert_indices': ...}`` on the
        high-res SMPL mesh.
    template_file_pkl_path : str
        Pickle containing per-vertex offsets under key ``'v_personal'``.
    gar_type : str
        Garment key, e.g. ``'shorts'``.

    Returns
    -------
    Smpl
        Model with the A-pose applied, zero translation, and the template's
        personal offsets written onto the garment vertices.
    """
    dp = SmplPaths(gender=gender)

    smpl_h = Smpl(dp.get_hres_smpl_model_data())

    # Open pickles in binary mode (required by pickle on Python 3) and use
    # encoding='latin1' to read Python-2-era pickles, matching the other
    # loaders in this file.  'with' closes the files deterministically.
    with open(init_pose_path, 'rb') as f:
        tgt_apose = pkl.load(f, encoding='latin1')

    tgt_apose = tgt_apose['pose']
    if gar_type == 'shorts':
        # Spread the hips slightly so shorts do not interpenetrate the thighs.
        tgt_apose[5] = 0.3
        tgt_apose[8] = -0.3

    smpl_h.trans[:] = 0

    with open(gar_file_path, 'rb') as f:
        gar = pkl.load(f, encoding='latin1')
    verts = gar[gar_type]['vert_indices']

    with open(template_file_pkl_path, 'rb') as f:
        data = pkl.load(f, encoding='latin1')
    v_personal = np.array(data['v_personal'])
    smpl_h.v_personal[verts] = v_personal

    smpl_h.pose[:] = tgt_apose

    return smpl_h
コード例 #2
0
def load_smpl_from_file(file):
    """Load a saved SMPL registration and rebuild the posed model.

    Parameters
    ----------
    file : str
        Path to a pickle with keys ``'gender'``, ``'pose'``, ``'betas'``
        and ``'trans'`` (a Python-2 pickle; read with latin1 encoding).

    Returns
    -------
    Smpl
        High-res SMPL model with the stored pose, shape and translation.
    """
    # Context manager closes the file even if unpickling raises
    # (the original leaked the handle).
    with open(file, 'rb') as fp:
        dat = pkl.load(fp, encoding='latin1')

    dp = SmplPaths(gender=dat['gender'])
    smpl_h = Smpl(dp.get_hres_smpl_model_data())

    smpl_h.pose[:] = dat['pose']
    smpl_h.betas[:] = dat['betas']
    smpl_h.trans[:] = dat['trans']

    return smpl_h
コード例 #3
0
def pose_garment(garment, vert_indices, smpl_params):
    '''
    Re-pose a garment by attaching it to an SMPL body as per-vertex offsets.

    :param garment: mesh whose vertices correspond to ``vert_indices`` on
        the high-res SMPL surface
    :param vert_indices: indices of the SMPL vertices covered by the garment
    :param smpl_params: dict with pose, betas, v_template, trans, gender
    :return: garment mesh deformed to the target pose
    '''
    paths = SmplPaths(gender=smpl_params['gender'])
    body = Smpl(paths.get_hres_smpl_model_data())

    # Evaluate the shaped body in the zero pose first, so the garment
    # displacements are measured in the unposed space.
    body.pose[:] = 0
    body.betas[:] = smpl_params['betas']
    # body.v_template[:] = smpl_params['v_template']

    displacement = np.zeros_like(body.r)
    displacement[vert_indices] = garment.v - body.r[vert_indices]
    body.v_personal[:] = displacement

    # Now apply the target pose and translation; the offsets ride along.
    body.pose[:] = smpl_params['pose']
    body.trans[:] = smpl_params['trans']

    return Mesh(body.r, body.f).keep_vertices(vert_indices)
コード例 #4
0
def init_smpl(gender, init_pose_path, gar_file_path, template_file_pkl_path, gar_type):
    """Build a high-res SMPL body in the garment A-pose with garment offsets.

    Parameters
    ----------
    gender : str
        Selects the male/female model via ``SmplPaths``.
    init_pose_path : str
        Pickle containing a dict with key ``'pose'`` (axis-angle pose vector).
    gar_file_path : str
        Pickle mapping garment type -> ``{'vert_indices': ...}`` on the
        high-res SMPL mesh.
    template_file_pkl_path : str
        Pickle containing per-vertex offsets under key ``'v_personal'``.
    gar_type : str
        Garment key, e.g. ``'shorts'``.

    Returns
    -------
    Smpl
        Model with the A-pose applied, zero translation, and the template's
        personal offsets written onto the garment vertices.
    """
    dp = SmplPaths(gender=gender)

    smpl_h = Smpl(dp.get_hres_smpl_model_data())

    # The public pkl.load(f, encoding='latin1') is equivalent to the
    # private pkl._Unpickler dance the original used, and 'with' closes
    # each file handle (the original leaked all three).
    with open(init_pose_path, 'rb') as pose_file:
        tgt_apose = pkl.load(pose_file, encoding='latin1')

    tgt_apose = tgt_apose['pose']
    if gar_type == 'shorts':
        # Spread the hips slightly so shorts do not interpenetrate the thighs.
        tgt_apose[5] = 0.3
        tgt_apose[8] = -0.3

    smpl_h.trans[:] = 0

    with open(gar_file_path, 'rb') as gar_file:
        gar = pkl.load(gar_file, encoding='latin1')
    verts = gar[gar_type]['vert_indices']

    with open(template_file_pkl_path, 'rb') as template_file:
        data = pkl.load(template_file, encoding='latin1')

    v_personal = np.array(data['v_personal'])
    smpl_h.v_personal[verts] = v_personal

    smpl_h.pose[:] = tgt_apose

    return smpl_h
コード例 #5
0
    return ret_posed_interp


# Root directory holding one sub-directory per registered scan.
# NOTE(review): hard-coded to the author's machine — adjust for local installs.
path = '/home/nathanbendich/MultiGarmentNetwork/transl8d_py3/clothes_objs/'
all_scans = glob(path + '*')
# Garment categories present in the digital wardrobe.
garment_classes = [
    'Pants', 'ShortPants', 'ShirtNoCoat', 'TShirtNoCoat', 'LongCoat'
]
# Map each garment class to the list of its .obj files across all scans.
gar_dict = {}
for gar in garment_classes:
    gar_dict[gar] = glob(join(path, '*', gar + '.obj'))

# Script entry point: pick a random scan with the chosen garment type and
# load its body registration.  NOTE(review): this block is truncated at the
# end of this chunk — the rest of the script is not visible here.
if __name__ == '__main__':
    dp = SmplPaths()
    vt, ft = dp.get_vt_ft_hres()
    smpl = Smpl(dp.get_hres_smpl_model_data())

    ## This file contains correspondances between garment vertices and smpl body
    fts_file = 'assets/garment_fts.pkl'
    vert_indices, fts = pkl.load(open(fts_file, 'rb'), encoding='latin1')
    # 'naked' gets the plain high-res body face-texture coordinates.
    fts['naked'] = ft

    ## Choose any garment type as source   -BLB
    #garment_type = 'TShirtNoCoat'  # 'TShirtNoCoat' was the original garment type entered by Bhatnagar, Mon Mar  2 23:17:29 EST 2020   -nxb.
    garment_type = 'Pants'
    index = np.random.randint(0, len(
        gar_dict[garment_type]))  ## Randomly pick from the digital wardrobe
    # Directory of the randomly chosen scan.
    path = split(gar_dict[garment_type][index])[0]

    # Body registration (pose/shape/trans) saved alongside the scan.
    garment_org_body_unposed = load_smpl_from_file(
        join(path, 'registration.pkl'))
コード例 #6
0
def main(opt):
    """Fit a garment template to a segmentation mask in two stages.

    Stage 1 fits the SMPL-anchored garment template to the mask; stage 2
    refines the result with free per-vertex offsets.  The final mesh is
    written to ``opt.save_file`` and, optionally, the IoU is appended to
    ``opt.save_iou_file``.

    Parameters
    ----------
    opt : argparse.Namespace (or similar options object)
        Command-line options.  Several derived asset paths and camera
        parameters are attached to it in-place below.
    """
    # Optionally dump the options as JSON for reproducibility.
    if opt.save_json_file != "None":
        dict_opts = vars(opt)
        with open(opt.save_json_file, 'w') as f:
            json.dump(dict_opts, f, sort_keys=True, indent=4)

    # Initialize joints used (comma-separated joint indices, or "None").
    if not (opt.init_pose_joints == "None"):
        init_joints_list = [
            int(item) for item in opt.init_pose_joints.split(',')
        ]
    else:
        init_joints_list = None

    if not (opt.ref_joint_list_coup == "None"):
        ref_joints_list = [
            int(item) for item in opt.ref_joint_list_coup.split(',')
        ]
    else:
        ref_joints_list = None

    # GET FILES — asset paths are resolved relative to the repo root
    # (two directories above this file).
    TWO_UP = up(up(os.path.abspath(__file__)))

    opt.init_pose_path = os.path.join(TWO_UP, 'assets/apose.pkl')
    opt.fmap_path = os.path.join(TWO_UP,
                                 'assets/fmaps/{}.npy'.format(opt.gar_type))
    opt.cam_file = os.path.join(TWO_UP, 'assets/cam_file.pkl')
    opt.template_mesh_path = os.path.join(
        TWO_UP, 'assets/init_meshes/{}.obj'.format(opt.gar_type))
    opt.template_mesh_pkl_path = os.path.join(
        TWO_UP, 'assets/init_meshes/{}.pkl'.format(opt.gar_type))
    opt.gar_file_path = os.path.join(TWO_UP, 'assets/gar_file.pkl')

    # Get camera params.  Pickles must be opened in binary mode under
    # Python 3 (the original used text mode, which raises); latin1
    # encoding reads the Python-2 pickles, matching the other loaders.
    with open(opt.cam_file, 'rb') as f:
        cam_data = pkl.load(f, encoding='latin1')
    opt.cam_z, opt.cam_y = cam_data[opt.gar_type]['cam_z'], cam_data[
        opt.gar_type]['cam_y']

    # Get vertex and face ids
    with open(opt.gar_file_path, 'rb') as f:
        gar = pkl.load(f, encoding='latin1')
    v_ids_template = gar[opt.gar_type]['vert_indices']
    faces_template = gar[opt.gar_type]['f']

    # Get vertex ids and faces for the template (front or back side).
    vertices_template, faces_side, v_ids_side = get_part(
        opt.front, opt.fmap_path, opt.template_mesh_path)

    # Initialize the SMPL template
    template_smpl = init_smpl(
        gender=opt.gender,
        init_pose_path=opt.init_pose_path,
        gar_file_path=opt.gar_file_path,
        template_file_pkl_path=opt.template_mesh_pkl_path,
        gar_type=opt.gar_type)

    # Get masks and distance transforms
    mask = get_mask(opt.mask_file)
    dist_i, dist_o, dif_mask = get_dist_tsfs(mask)
    # ==============================================
    #               FIRST STAGE
    # ==============================================

    ## Initialize camera and renderer
    ## Initialize debug camera and renderer

    debug_cam_init, debug_rend_init = get_cam_rend(
        verts=template_smpl[v_ids_template][v_ids_side],
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)

    opt_cam_init, opt_rend_init = get_cam_rend(
        verts=template_smpl[v_ids_template][v_ids_side],
        faces=faces_side,
        cam_y=opt.cam_y,
        cam_z=opt.cam_z)

    part_mesh, temp_params = init_fit(opt=opt,
                                      dist_o=dist_o,
                                      dist_i=dist_i,
                                      dif_mask=dif_mask,
                                      rn_m=opt_rend_init,
                                      smpl_h=template_smpl,
                                      v_ids_template=v_ids_template,
                                      faces_template=faces_template,
                                      debug_rn=debug_rend_init,
                                      v_ids_side=v_ids_side,
                                      faces_side=faces_side,
                                      joints_list=init_joints_list)

    # ==============================================
    #               REFINEMENT STAGE
    # ==============================================

    # Free per-vertex offsets optimized on top of the stage-1 result.
    v = np.array(part_mesh.v)
    v_offset = ch.zeros(v.shape)

    # Fresh SMPL model initialized from the stage-1 parameters.
    dp = SmplPaths(gender=opt.gender)
    smpl_h_refine = Smpl(dp.get_hres_smpl_model_data())

    data = temp_params
    smpl_h_refine.pose[:] = data["pose"]
    smpl_h_refine.trans[:] = data["trans"]
    smpl_h_refine.betas[:] = data["betas"]
    smpl_h_refine.v_personal[:] = data["v_personal"]

    ## Initialize second camera and renderer
    ## Initialize second debug camera and renderer

    debug_cam_ref, debug_rend_ref = get_cam_rend(verts=v[v_ids_side] +
                                                 v_offset[v_ids_side],
                                                 faces=faces_side,
                                                 cam_y=opt.cam_y,
                                                 cam_z=opt.cam_z)
    opt_cam_ref, opt_rend_ref = get_cam_rend(verts=v[v_ids_side] +
                                             v_offset[v_ids_side],
                                             faces=faces_side,
                                             cam_y=opt.cam_y,
                                             cam_z=opt.cam_z)

    ## Rings and camera for the projection error
    gar_rings = compute_boundaries(v + v_offset, faces_template)
    position_largest_ring = get_verts_rings(gar_rings=gar_rings,
                                            verts=v + v_offset,
                                            v_ids_side=v_ids_side)
    proj_cam_ref, _ = get_cam_rend(verts=position_largest_ring,
                                   faces=faces_side,
                                   cam_y=opt.cam_y,
                                   cam_z=opt.cam_z)
    max_y, min_y = get_max_min_mask(mask)

    final_verts, final_iou = final_fit(opt=opt,
                                       part_mesh=part_mesh,
                                       v=v,
                                       v_offset=v_offset,
                                       dist_o=dist_o,
                                       dist_i=dist_i,
                                       smpl_h_ref=smpl_h_refine,
                                       rn_m=opt_rend_ref,
                                       debug_rn=debug_rend_ref,
                                       dif_mask=dif_mask,
                                       v_ids_template=v_ids_template,
                                       faces_template=faces_template,
                                       v_ids_side=v_ids_side,
                                       faces_side=faces_side,
                                       max_y=max_y,
                                       proj_cam=proj_cam_ref,
                                       ref_joint_list_coup=ref_joints_list)

    # Save the fitted garment mesh.
    mesh_sv = Mesh(v=final_verts, f=faces_template)
    mesh_sv.write_obj(opt.save_file)

    if opt.save_iou_file != "None":
        # 'a+' appends across runs; the with-block closes the file
        # (the original had a redundant fp.close() after the with).
        with open(opt.save_iou_file, 'a+') as fp:
            fp.write('{} , {} \n'.format(opt.save_file, str(final_iou)))