Example #1
 def __init__(self, args):
     super(HybridModel, self).__init__()
     self.feat_dim = args.embed_dim
     if args.cls:
         oup_dim = 6890
     else:
         oup_dim = self.feat_dim
     self.fe = PointNet2(inp_dim=0, oup_dim=self.feat_dim)
     if args.transf_reg:
         if args.RegNet == 'Reg2':
             self.reg = Regularization2Module()
         elif args.RegNet == 'Reg':
             self.reg = RegularizationModule(init=args.init)
         if args.animals:
             gt_feats = torch.Tensor(
                 helper.loadSMALDescriptors(args.desc)[:, :self.feat_dim])
             gt_points = torch.Tensor(
                 np.array(helper.loadSMALModels()['cat'].vertices))
         else:
             gt_feats = torch.Tensor(
                 helper.loadSMPLDescriptors(args.desc)[:, :self.feat_dim])
             gt_points = torch.Tensor(helper.loadSMPLModels()[0].verts)
         self.register_buffer('gt_feats', gt_feats)
         self.register_buffer('gt_points', gt_points)
     self.transf_reg = args.transf_reg
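The constructor above reads a handful of fields from its args object. As a rough orientation, here is a minimal, hypothetical usage sketch: the field values are illustrative guesses, and HybridModel together with its PointNet2 and helper dependencies is assumed to be importable from the repository.

  # Hypothetical argument namespace for HybridModel above; only the fields
  # that __init__ actually reads are set, and all values are illustrative.
  from types import SimpleNamespace

  args = SimpleNamespace(
      embed_dim=128,       # per-point feature width produced by PointNet2
      cls=False,           # True would set oup_dim to the 6890 SMPL vertices
      transf_reg=True,     # enables the regularization branch and GT buffers
      RegNet='Reg',        # 'Reg' -> RegularizationModule, 'Reg2' -> Regularization2Module
      init=None,           # forwarded to RegularizationModule(init=...)
      animals=False,       # False -> SMPL descriptors/model, True -> SMAL ('cat')
      desc='Laplacian_n',  # descriptor set passed to the loader helpers
  )
  model = HybridModel(args)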
Example #2
 def __init__(self,
              descriptor_dim,
              sampler=None,
              split='train',
              transform=DefaultTransform,
              cls=False,
              build_graph=False):
     super(SurrealFEPts5k, self).__init__()
     self.name = 'SurrealFEPts5k'
     self.split = split
     if self.split == 'train':
         self.IDlist = IDlist[:, :-(num_test * num_views)].reshape(-1)
     elif self.split == 'test':
         self.IDlist = IDlist[:, -(num_test * num_views):].reshape(-1)
     elif self.split == 'val':
         self.IDlist = IDlist[:, :num_views].reshape(-1)
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(
         PATH_TO_SURREAL)
     self.template_feats = helper.loadSMPLDescriptors()[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
     self.cls = cls
     if build_graph:
         self.transform = T.Compose(
             [transform, T.KNNGraph(k=6),
              T.ToDense(5000)])
     else:
         self.transform = T.Compose([transform, T.ToDense(5000)])
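For orientation, a hypothetical instantiation of the SurrealFEPts5k dataset defined above. The descriptor_dim and build_graph values are illustrative, and the module-level IDlist, num_test and num_views globals that __init__ reads are assumed to exist in the dataset module.

  # Hypothetical instantiation of SurrealFEPts5k; assumes the module-level
  # IDlist / num_test / num_views globals are defined.
  train_set = SurrealFEPts5k(descriptor_dim=128, split='train', build_graph=True)
  val_set = SurrealFEPts5k(descriptor_dim=128, split='val')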
Example #3
 def __init__(self, descriptor_dim, split='train', desc='Laplacian_n',
              transform='default', cls=False):
   super(SurrealFEPts, self).__init__()
   self.name = 'SurrealFEPts'
   if (split == 'train') or (split == 'val'):
     self.num_views = 20
   else:
     self.num_views = 100
   self.IDlist = np.stack([np.arange(100000*self.num_views),
                           np.arange(115000*self.num_views,
                                     215000*self.num_views)],
                          axis=0)
   self.split = split
   if self.split == 'train':
     self.IDlist = self.IDlist[:, :-(num_test*self.num_views)].reshape(-1)
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(hc.PATH_TO_SURREAL)
   elif self.split == 'test':
     self.IDlist = self.IDlist[:, -(num_test*self.num_views):].reshape(-1)
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(hc.PATH_TO_SURREAL_TEST)
   elif self.split == 'val':
     self.IDlist = self.IDlist[:, :(5*self.num_views)].reshape(-1)
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(hc.PATH_TO_SURREAL)
     #ll = [20,24,25,26,28,35,36,37,39,52,53,106,128,152,160,178,187,191]
     #self.IDlist = np.array([self.IDlist[l] for l in ll])
   self.result_path = '{}/result/SURREAL/'.format(hc.PATH_TO_DATA)
   self.template_feats = helper.loadSMPLDescriptors(desc)[:, :descriptor_dim]
   self.template_points = helper.loadSMPLModels()[0].verts
   self.cls = cls
   if transform == 'default':
     if self.split == 'train':
       self.transform = TrainTransform
     else:
       self.transform = TestTransform
   else:
     self.transform = transform
Example #4
 def __init__(self,
              descriptor_dim,
              split='train',
              desc='Laplacian_n',
              transform='default',
              cls=False):
     super(Shrec19FEPts, self).__init__()
     self.name = 'Shrec19FEPts'
     self.num_views = 100
     self.result_path = '{}/result/SHREC19/'.format(hc.PATH_TO_DATA)
     self.IDlist = np.arange(1 * num_views, 45 * num_views)
     self.split = split
     if self.split == 'train':
         raise RuntimeError("This dataset is Test Only")
     elif self.split == 'test':
         self.IDlist = self.IDlist  # the test split keeps the full ID list
     elif self.split == 'val':
         self.IDlist = self.IDlist[:num_views]
     self.file_path = '{}/scans/{{}}/{{:03d}}.mat'.format(
         hc.PATH_TO_SHREC19)
     self.template_feats = helper.loadSMPLDescriptors(
         desc)[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
     self.cls = cls
     if transform == 'default':
         if self.split == 'train':
             self.transform = TestTransform
         else:
             self.transform = TrainTransform
     else:
         self.transform = transform
Example #5
 def __init__(self,
              descriptor_dim,
              split='train',
              desc='Laplacian_n',
              transform='default',
              cls=False):
     super(DGFSurrealFEPts, self).__init__()
     self.name = 'DGFSurrealFEPts'
     self.num_views = 20
     self.split = split
     self.result_path = '{}/result/DGFSURREAL/'.format(hc.PATH_TO_DATA)
     if self.split == 'train':
         self.IDlist = IDlist.reshape(-1)
     elif self.split == 'test':
         self.IDlist = IDlist[-(num_test * num_views):].reshape(-1)
     elif self.split == 'val':
         self.IDlist = IDlist[:num_views * 5].reshape(-1)
         #ll = [20,24,25,26,28,35,36,37,39,52,53,106,128,152,160,178,187,191]
         #self.IDlist = np.array([self.IDlist[l] for l in ll])
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(
         hc.PATH_TO_DGFSURREAL)
     self.template_feats = helper.loadSMPLDescriptors(
         desc)[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
     self.cls = cls
     if transform == 'default':
         if self.split == 'train':
             self.transform = TrainTransform
         else:
             self.transform = TestTransform
     else:
         self.transform = transform
Example #6
 def __init__(self,
              descriptor_dim,
              split='train',
              desc='Laplacian_n',
              transform='default',
              cls=False):
     super(FaustTestFEPts, self).__init__()
     self.name = 'FaustTestFEPts'
     self.num_views = 200
     self.result_path = '{}/result/FAUST-Test/'.format(hc.PATH_TO_DATA)
     self.IDlist = np.arange(20000)
     self.split = split
     if self.split == 'train':
         raise RuntimeError("This dataset is Test Only")
     elif self.split == 'test':
         self.IDlist = self.IDlist  # the test split keeps the full ID list
     elif self.split == 'val':
         self.IDlist = self.IDlist[:40]
     self.file_path = '{}/scans/{{0:03d}}_{{1:03d}}.mat'.format(
         hc.PATH_TO_FAUST_TEST_SCANS)
     self.template_feats = helper.loadSMPLDescriptors(
         desc)[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
     #self.cls = cls
     if transform == 'default':
         if self.split == 'train':
             self.transform = TrainTransform
         else:
             self.transform = TestTransform
     else:
         self.transform = transform
Example #7
 def __init__(self, num_points, descriptor_dim, split='train'):
     super(SurrealFEDepthImgs, self).__init__()
     self.name = 'SurrealFEDepthImgs'
     self.split = split
     self.num_sample_points = num_points
     if self.split == 'train':
         self.IDlist = IDlist[:, :-(num_test * num_views)].reshape(-1)
     elif self.split == 'test':
         self.IDlist = IDlist[:, -(num_test * num_views):].reshape(-1)
     elif self.split == 'val':
         self.IDlist = IDlist[:, :num_views].reshape(-1)
     #self.file_path = '{}/scans/%d_%d.mat'.format(PATH_TO_SURREAL)
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(
         PATH_TO_SURREAL)
     self.template_feats = helper.loadSMPLDescriptors()[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
Example #8
  #if not os.path.exists('{}/{}.txt'.format(rotation_path, n_views-1)):
  #  thetas = np.linspace(0, np.pi*2, n_views)
  #  rotations = [linalg.rodriguez(np.random.randn(3)) for i in range(n_views)]
  #  #rotations = [linalg.rodriguez(np.array([1.,0.,0.])*thetas[i] + np.random.randn(3)*0.2) for i in range(n_views)]
  #  for i, rotation in enumerate(rotations):
  #    np.savetxt('{}/{}.txt'.format(rotation_path, i), rotation)
  #else:
  #  rotations = [np.loadtxt('{}/{}.txt'.format(rotation_path, i)).reshape((3, 3)) for i in range(n_views)]

  render_path = '{}/surreal-test/scans'.format(PATH_TO_DATA)
  MAT_PATH = '{}/{{0:06d}}'.format(render_path)
  MAT = '{}/{{0:06d}}/{{1:03d}}.mat'.format(render_path)
  OBJ = '{}/{{0:06d}}/{{1:03d}}.obj'.format(render_path)
  os.system('mkdir -p %s' % render_path)
  models = helper.loadSMPLModels()
  gt_dsc = helper.loadSMPLDescriptors(desc='Laplacian_n')
  edges = computeGraph(6890, models[0].faces, knn=args.knn)
  for mesh_id in range(offset, offset+length):
    os.system('mkdir -p %s' % MAT_PATH.format(mesh_id))
    params = np.array(smpl_params[mesh_id, :])
    params[11:14] = 0.0
    gender = int(params[0])
    model = models[gender]
    zero_params = np.zeros(85)
    model.update_params(zero_params)
    rest_mesh = vis.getTriangleMesh(model.verts, model.faces)
    params = np.concatenate([np.zeros(3), params[1:]], axis=0)
    model.update_params(params)
    mesh = vis.getTriangleMesh(model.verts, model.faces)
    point_rotations, point_translations = helper.computeLocalRotations(
        np.array(mesh.vertices), np.array(mesh.triangles),
        np.array(rest_mesh.vertices),
        np.arange(np.array(mesh.vertices).shape[0]),
        edges=edges)  # [N, 3]
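The MAT_PATH, MAT and OBJ templates above use a two-stage str.format: the outer format() fills in the base directory, while the doubled braces survive as placeholders that are filled per mesh and view inside the loop. A small self-contained sketch of this pattern, with a stand-in directory instead of the real render path:

  # Two-stage path templating as used above; '/tmp/surreal-test/scans' is a
  # stand-in for the actual render_path.
  render_path = '/tmp/surreal-test/scans'
  MAT = '{}/{{0:06d}}/{{1:03d}}.mat'.format(render_path)
  print(MAT)                # /tmp/surreal-test/scans/{0:06d}/{1:03d}.mat
  print(MAT.format(12, 3))  # /tmp/surreal-test/scans/000012/003.mat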
Example #9
if __name__ == '__main__':
  SCAN = '%s/MPI-FAUST/training/scans/tr_scan_{0:03d}.ply' % (PATH_TO_DATA)
  SCAN_CORRES = '%s/MPI-FAUST/training/scans/tr_scan_{0:03d}.corres' % (PATH_TO_DATA)
  REG = '%s/MPI-FAUST/training/registrations/tr_reg_{0:03d}.ply' % (PATH_TO_DATA)
  FAUST_DESC = '%s/faust/faust_descriptors/{0:03d}.mat' % (PATH_TO_DATA)
  rotation_path = '%s/faust/render_rotations' % (PATH_TO_DATA)
  render_path = '%s/faust/scans' % (PATH_TO_DATA)
  OBJ = '%s/{0:03d}_{1:03d}.obj' % (render_path)
  CORRES = '%s/{0:03d}_{1:03d}.corres' % (render_path)
  MAT = '%s/{0:03d}_{1:03d}.mat' % (render_path)

  #camera = PinholeCamera()
  #rest_mesh = helper.loadSMPLModels()[0]
  #edges = computeGraph(6890, rest_mesh.faces, knn=7)
  #rest_mesh = vis.getTriangleMesh(rest_mesh.verts, rest_mesh.faces)
  gt_descriptors = helper.loadSMPLDescriptors('Laplacian_n')
  dsc_tree = NN(n_neighbors=1, n_jobs=10).fit(gt_descriptors)
  for scan_id in range(100):
    print(scan_id)
    """ Correspondence translation """
    raw_mesh = o3d.io.read_triangle_mesh(SCAN.format(scan_id)) # raw mesh
    reg_mesh = o3d.io.read_triangle_mesh(REG.format(scan_id)) # registration mesh
    tree = NN(n_neighbors=1, n_jobs=10).fit(np.array(reg_mesh.vertices))
    dists, indices = tree.kneighbors(np.array(raw_mesh.vertices))
    scan2reg = indices[:, 0]
    Nraw = np.array(raw_mesh.vertices).shape[0]
    Nreg = np.array(reg_mesh.vertices).shape[0]

    mesh = o3d.io.read_triangle_mesh(SCAN.format(scan_id))
    dsc_file = FAUST_DESC.format(scan_id)
    gt_dsc = sio.loadmat(dsc_file)['dsc'] # [6890, 128]
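The scan-to-registration step above is a plain 1-nearest-neighbour lookup in vertex space. Below is a self-contained sketch of that step with random point sets standing in for the FAUST raw scan and registration meshes; the array sizes are illustrative.

  # Nearest-neighbour correspondence sketch; random points stand in for the
  # raw scan and registration vertices used above.
  import numpy as np
  from sklearn.neighbors import NearestNeighbors as NN

  raw_points = np.random.rand(10000, 3)  # stand-in for raw scan vertices
  reg_points = np.random.rand(6890, 3)   # stand-in for registration vertices

  tree = NN(n_neighbors=1).fit(reg_points)
  dists, indices = tree.kneighbors(raw_points)
  scan2reg = indices[:, 0]  # nearest registration vertex per scan vertex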