# Fit the canonical (zero-style) skirt onto the female A-pose SMPL body and
# find, for every skirt vertex, its K closest body vertices.
# NOTE(review): relies on names imported earlier in the file (np, osp, PCA,
# pickle, SMPLNP, get_Apose, ROOT) — not visible in this chunk.

# Rebuild the skirt style PCA from saved weights; components_/mean_ are
# assigned directly instead of calling fit().
style_model = np.load(osp.join(ROOT, 'skirt_female', 'style_model.npz'))
pca = PCA(n_components=4)
pca.components_ = style_model['pca_w']
pca.mean_ = style_model['mean']
# Zero style coefficients -> mean skirt; flatten to (n_skirt_verts, 3).
skirt_v = pca.inverse_transform(np.zeros([1, 4])).reshape([-1, 3])

# move the skirt to the right position
with open(osp.join(ROOT, 'garment_class_info.pkl'), 'rb') as f:
    garment_meta = pickle.load(f)
skirt_f = garment_meta['skirt']['f']
# Pant vertex indices map pant vertices onto body vertices; the pant's upper
# boundary is used as the waist reference on the body.
vert_indices = garment_meta['pant']['vert_indices']
up_bnd_inds = np.load(osp.join(ROOT, 'skirt_upper_boundary.npy'))
pant_up_bnd_inds = np.load(osp.join(ROOT, 'pant_upper_boundary.npy'))
waist_body_inds = vert_indices[pant_up_bnd_inds]

smpl = SMPLNP(gender='female')
apose = get_Apose()
# Neutral-shape (300 zero betas) body in A-pose; displacement/garment args unset.
body_v, _ = smpl(np.zeros([300]), apose, None, None)

# Translate the skirt so its upper-boundary centroid coincides with the
# body's waist centroid.
trans = np.mean(body_v[waist_body_inds], 0, keepdims=True) - np.mean(
    skirt_v[up_bnd_inds], 0, keepdims=True)
skirt_v = skirt_v + trans
# Small manual x-offset after alignment — presumably a hand-tuned correction;
# TODO(review): confirm why 0.01 along x.
skirt_v[:, 0] -= 0.01

p = 1  # NOTE(review): unused in this chunk; may be consumed by later code.
K = 100  # number of closest body vertices kept per skirt vertex

# find closest vertices
# Pairwise Euclidean distances, shape (n_skirt, n_body).
dist = np.sqrt(np.sum(np.square(skirt_v[:, None] - body_v[None]), 2))  # n_skirt, n_body
# Indices of the K nearest body vertices per skirt vertex, nearest first.
body_ind = np.argsort(dist, 1)[:, :K]
if __name__ == '__main__':
    # Build a small grid of extreme body shapes (betas) and their canonical
    # A-pose bodies for each garment class / gender.
    # NOTE(review): this chunk is truncated — the inner loop body continues
    # past the visible source.
    # STAGE1_FNUM = 5
    # STAGE2_FNUM = 5
    # STAGE3_FNUM = 5
    SM_FNUM = 6       # frame-count constants; presumably simulation stage lengths — TODO confirm
    STABLE_FNUM = 4
    END_FNUM = 5
    lowest = -2
    apose = get_Apose()
    with open(osp.join(ROOT, 'garment_class_info.pkl'), 'rb') as f:
        garment_meta = pickle.load(f)
    for gender in ['female']:
        smpl = SMPLNP_Lres(gender=gender)   # low-res SMPL
        smpl_hres = SMPLNP(gender=gender)   # high-res SMPL
        # Neutral-gender model uses 10 betas; gendered models use 300.
        num_betas = 10 if gender == 'neutral' else 300
        # 9 shape samples: +/-2 and +/-1 on the first two shape components,
        # plus the all-zero row 8 (canonical shape).
        betas = np.zeros([9, num_betas], dtype=np.float32)
        betas[0, 0] = 2
        betas[1, 0] = -2
        betas[2, 1] = 2
        betas[3, 1] = -2
        betas[4, 0] = 1
        betas[5, 0] = -1
        betas[6, 1] = 1
        betas[7, 1] = -1
        # Canonical (zero-beta) A-pose body from the low-res model.
        vcanonical = smpl(np.zeros_like(betas[0]), apose)
        for gc in ['skirt']:
            gc_gender_dir = osp.join(ROOT, '{}_{}'.format(gc, gender))
            shape_dir = osp.join(gc_gender_dir, 'shape')
if __name__ == '__main__':
    # Merge original and extended skirt registrations and translate every
    # skirt so its upper boundary sits on the canonical body's pant waist.
    garment_class = 'skirt'
    RAW_DIR = osp.join(ROOT, 'skirt_orig_reg')
    RAW_EXT_DIR = osp.join(ROOT, 'skirt_reg')
    SAVE_DIR = osp.join(ROOT, 'raw_data')
    # SAVE_DIR = '/BS/cloth-anim/static00/tailor_data/raw_data'

    # Original registrations, with a bad-scan exclusion list.
    people_names, verts, faces = get_orig_verts(
        RAW_DIR, osp.join(ROOT, 'skirt_orig_bad.txt'))
    people_names_ext, verts_ext, _ = get_verts(RAW_EXT_DIR)
    # Scale correction for the extended set — presumably a unit/scale
    # mismatch between the two registration sources; TODO confirm 1.15.
    verts_ext /= 1.15
    verts = np.concatenate((verts, verts_ext), 0)
    people_names = people_names + people_names_ext
    n_verts = verts[0].shape[0]

    smpl = SMPLNP('female')
    apose = get_Apose()
    # Canonical (zero-beta, 300 betas) female A-pose body.
    canonical_body, _ = smpl(np.zeros([300]), apose, None, None)
    with open(osp.join(ROOT, 'garment_class_info.pkl'), 'rb') as f:
        # latin-1 keeps Python-2 pickles loadable under Python 3.
        class_info = pickle.load(f, encoding='latin-1')
    pant_ind = class_info['pant']['vert_indices']
    pant_bnd = np.load(osp.join(ROOT, 'pant_upper_boundary.npy'))
    skirt_bnd = np.load(osp.join(ROOT, 'skirt_upper_boundary.npy'))
    # Waist target: centroid of the pant upper boundary on the body.
    pant_bnd_loc = np.mean(canonical_body[pant_ind][pant_bnd], 0)

    all_v = []
    for people_name, v in zip(people_names, verts):
        # Translate each skirt so its upper-boundary centroid matches the
        # body's waist centroid.
        skirt_bnd_loc = np.mean(v[skirt_bnd], 0)
        trans = (pant_bnd_loc - skirt_bnd_loc)[None]
        trans_v = v + trans
        all_v.append(trans_v)
import torch import numpy as np import trimesh from smpl_torch import SMPLNP from utils.rotation import get_Apose from utils.ios import read_pc2 from global_var import ROOT if __name__ == '__main__': garment_class = 'skirt' gender = 'female' lowest = -2 STABILITY_FRAMES = 2 smpl = SMPLNP(gender) apose = torch.from_numpy(get_Apose().astype(np.float32)) data_root = osp.join(ROOT, '{}_{}'.format(garment_class, gender)) pose_dir = osp.join(ROOT, '{}_{}'.format(garment_class, gender), 'pose') ss_dir = osp.join(data_root, 'style_shape') shape_dir = osp.join(data_root, 'shape') beta_strs = [k.replace('.obj', '') for k in os.listdir(shape_dir) if k.endswith('.obj')] betas = np.load(osp.join(data_root, 'shape', 'betas.npy')) all_ss = [k for k in os.listdir(pose_dir) if len(k) == 7] for ss in all_ss: beta_str, gamma_str = ss.split('_') pose_ss_dir = osp.join(pose_dir, ss) if garment_class in ['pant', 'skirt', 'short-pant']:
# Compute per-sample garment displacements' alignment translation: bring each
# posed body back to the canonical body in the garment region.
import os.path as osp
import pickle
import numpy as np
from tqdm import tqdm
from smpl_torch import SMPLNP
from utils.rotation import get_Apose
from utils.diffusion_smoothing import DiffusionSmoothing as DS
from global_var import ROOT

if __name__ == '__main__':
    garment_class = 'pant'
    raw_dir = osp.join(ROOT, 'raw_data')
    # save_dir = osp.join(ROOT, '{}_{}/pca'.format(garment_class, gender))
    smpl = SMPLNP('neutral')
    apose = get_Apose()
    with open(osp.join(ROOT, 'garment_class_info.pkl'), 'rb') as f:
        # latin-1 keeps Python-2 pickles loadable under Python 3.
        class_info = pickle.load(f, encoding='latin-1')
    # Body-vertex indices covered by this garment class.
    vert_indices = class_info[garment_class]['vert_indices']

    raw_path = osp.join(raw_dir, '{}.npy'.format(garment_class))
    beta_path = osp.join(raw_dir, '{}_betas.npy'.format(garment_class))
    all_disp = np.load(raw_path)
    betas = np.load(beta_path)
    data_num = len(all_disp)
    # Batched SMPL forward: every sample uses the same A-pose, its own betas
    # and displacement field.
    vbody, vcloth = smpl(betas, np.tile(apose[None], [data_num, 1]),
                         all_disp, garment_class, batch=True)
    # Canonical (zero-beta, neutral 10-beta) A-pose body.
    canonical_body, _ = smpl(np.zeros([10]), apose, None, None)
    # Per-sample mean offset from each shaped body to the canonical body,
    # averaged over the garment-region vertices; shape (data_num, 1, 3).
    trans = np.mean((canonical_body[None] - vbody)[:, vert_indices, :], 1,
                    keepdims=True)
# Set up directories and mesh topology for rendering pose / style-shape
# visualizations of one garment class and gender.
import os
import os.path as osp
import cv2
import numpy as np
import pickle
from renderer import Renderer
from smpl_torch import SMPLNP
from global_var import DATA_DIR

if __name__ == '__main__':
    garment_class = 't-shirt'
    gender = 'female'
    img_size = 512
    renderer = Renderer(img_size)
    smpl = SMPLNP(gender=gender, cuda=False)

    # All per-(class, gender) data lives under a single directory; build the
    # path once instead of re-formatting '{}_{}' for every sub-path (the
    # original repeated it seven times).
    gc_gender_dir = osp.join(DATA_DIR, '{}_{}'.format(garment_class, gender))
    pose_dir = osp.join(gc_gender_dir, 'pose')
    shape_dir = osp.join(gc_gender_dir, 'shape')
    ss_dir = osp.join(gc_gender_dir, 'style_shape')
    pose_vis_dir = osp.join(gc_gender_dir, 'pose_vis')
    ss_vis_dir = osp.join(gc_gender_dir, 'style_shape_vis')
    pivots_path = osp.join(gc_gender_dir, 'pivots.txt')
    avail_path = osp.join(gc_gender_dir, 'avail.txt')
    os.makedirs(pose_vis_dir, exist_ok=True)
    os.makedirs(ss_vis_dir, exist_ok=True)

    with open(os.path.join(DATA_DIR, 'garment_class_info.pkl'), 'rb') as f:
        # latin-1 keeps Python-2 pickles loadable under Python 3.
        class_info = pickle.load(f, encoding='latin-1')
    body_f = smpl.base.faces                      # body triangle faces
    garment_f = class_info[garment_class]['f']    # garment triangle faces