def test_plane_estimation_roty_plane(angle):
    from diffrend.numpy.ops import axis_angle_matrix

    x, y = np.meshgrid(np.linspace(-1, 1, 5), np.linspace(1, -1, 5))
    z = np.zeros_like(x)
    pos = tch_var_f(np.stack((x, y, z), axis=2))

    M = tch_var_f(axis_angle_matrix(axis=[0, 1, 0], angle=angle))
    pos = pos.view(-1, 3).matmul(M[:, :3].transpose(
        1, 0))[:, :3].contiguous().view(pos.shape)
    normals = estimate_surface_normals_plane_fit(pos, None)
    normals_ = get_data(normals)
    np.testing.assert_array_almost_equal(
        np.sum(get_data(pos) * normals_, axis=-1),
        np.zeros(normals.shape[:2]))
    nc_cost = get_data(normal_consistency_cost(pos, normals, 1))
    print('nc_cost', nc_cost)
    np.testing.assert_almost_equal(nc_cost, 0.0)

    M = tch_var_f(axis_angle_matrix(axis=[0, 1, 0], angle=-angle))
    pos = pos.view(-1, 3).matmul(M[:, :3].transpose(
        1, 0))[:, :3].contiguous().view(pos.shape)
    normals = estimate_surface_normals_plane_fit(pos, None)
    normals_ = get_data(normals)

    pos_grad = get_data(grad_spatial2d(pos))
    dot_prod = np.sum(pos_grad * normals_[np.newaxis, ...], axis=-1)
    np.testing.assert_array_almost_equal(dot_prod, np.zeros_like(dot_prod))
    nc_cost = get_data(normal_consistency_cost(pos, normals, 1))
    print('nc_cost', nc_cost)
    np.testing.assert_almost_equal(nc_cost, 0.0)
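# test_plane_estimation_roty_plane takes `angle` as an argument, so it is
# presumably driven by a pytest parametrization elsewhere in the test module.
# A hedged sketch of how it could be wired up; the angle values below are
# illustrative only, not taken from the source:
import pytest


@pytest.mark.parametrize('angle', [0.0, np.pi / 6, np.pi / 4, np.pi / 2])
def test_plane_estimation_roty_plane_angles(angle):
    test_plane_estimation_roty_plane(angle)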
def transform_model(obj, scale, rotate, translate):
    """Apply an affine transform to a model's vertices.

    The order of transformation is scale -> rotate -> translate,
    i.e., M = translate * rotate * scale.

    Args:
        obj: Model dictionary with a vertex array under key 'v'.
        scale: Per-axis scale factors, or None to skip scaling.
        rotate: Dict with keys 'axis' and 'angle_deg', or None to skip rotation.
        translate: Translation vector, or None to skip translation.

    Returns:
        The model dictionary with its 'v' entry transformed.
    """
    v = obj['v']
    if scale is not None:
        v = v * np.array(scale)[np.newaxis, :]
    if rotate is not None:
        rotation_axis = rotate['axis']
        angle_deg = rotate['angle_deg']
        M = axis_angle_matrix(axis=rotation_axis,
                              angle=np.deg2rad(angle_deg))
        v = np.matmul(v, M.transpose(1, 0)[:3, :3])
    if translate is not None:
        v = v + np.array(translate)[np.newaxis, :]
    obj['v'] = v
    return obj
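# Hedged usage sketch for transform_model; the scale, rotation, and translation
# values (and the 'cube.obj' path) are illustrative, not taken from this codebase.
def _demo_transform_model():
    obj = load_model('cube.obj')  # hypothetical model path
    return transform_model(obj,
                           scale=[0.5, 0.5, 0.5],
                           rotate={'axis': [0, 1, 0], 'angle_deg': 45.0},
                           translate=[2.0, 0.0, 0.0])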
def __getitem__(self, idx):
    """Get item."""
    # Get the object paths
    obj_path1 = os.path.join(self.opt.root_dir1, 'cube.obj')
    obj_path2 = os.path.join(self.opt.root_dir2, 'sphere_halfbox.obj')
    obj_path3 = os.path.join(self.opt.root_dir3, 'cone.obj')

    # Load the foreground and background models once and cache them
    if not self.loaded:
        self.fg_obj1 = load_model(obj_path1)
        self.fg_obj2 = load_model(obj_path2)
        self.fg_obj3 = load_model(obj_path3)
        self.bg_obj = load_model(self.opt.bg_model)
        self.loaded = True

    # Randomly assign one of the four candidate offsets to each object
    offset_id = np.random.permutation(4)
    obj_model1 = self.fg_obj1
    obj_model2 = self.fg_obj2
    obj_model3 = self.fg_obj3
    obj2 = self.bg_obj

    # Center and rescale each foreground mesh to roughly unit extent
    v11 = (obj_model1['v'] - obj_model1['v'].mean()) / (
        obj_model1['v'].max() - obj_model1['v'].min())
    v12 = (obj_model2['v'] - obj_model2['v'].mean()) / (
        obj_model2['v'].max() - obj_model2['v'].min())
    v13 = (obj_model3['v'] - obj_model3['v'].mean()) / (
        obj_model3['v'].max() - obj_model3['v'].min())
    v2 = obj2['v']

    scale = (obj2['v'].max() - obj2['v'].min()) * 0.22
    offset = np.array([[6.9, 6.9, 7.0],
                       [20.4, 6.7, 6.7],
                       [20.4, 6.7, 20.2],
                       [7.0, 6.7, 20.4]])

    if self.opt.only_background:
        v = v2
        f = obj2['f']
    elif self.opt.only_foreground:
        # Combine the three foreground meshes, offsetting the face indices
        # of each mesh by the number of vertices that precede it.
        v = np.concatenate((v11, v12, v13))
        f = np.concatenate((obj_model1['f'],
                            obj_model2['f'] + v11.shape[0],
                            obj_model3['f'] + v11.shape[0] + v12.shape[0]))
    else:
        if self.opt.random_rotation:
            # Only the first foreground object is randomly rotated and placed
            random_axis = np_normalize(np.random.rand(3))
            random_angle = np.random.rand(1) * np.pi * 2
            M = axis_angle_matrix(axis=random_axis, angle=random_angle)
            M[:3, 3] = offset[offset_id[0]] + 1.5 * np.random.randn(3)
            v11 = np.matmul(scale * v11,
                            M.transpose(1, 0)[:3, :3]) + M[:3, 3]
        else:
            # Scale and jitter each foreground object around its offset
            v11 = scale * v11 + offset[offset_id[0]] + 1.5 * np.random.randn(3)
            v12 = scale * v12 + offset[offset_id[1]] + 1.5 * np.random.randn(3)
            v13 = scale * v13 + offset[offset_id[2]] + 1.5 * np.random.randn(3)
        # Merge foreground and background meshes with cumulative face offsets
        v = np.concatenate((v11, v12, v13, v2))
        f = np.concatenate(
            (obj_model1['f'],
             obj_model2['f'] + v11.shape[0],
             obj_model3['f'] + v12.shape[0] + v11.shape[0],
             obj2['f'] + v13.shape[0] + v12.shape[0] + v11.shape[0]))
    obj_model = {'v': v, 'f': f}

    if self.opt.use_mesh:
        # Normalize the vertices so the largest axis spread is 1
        v = obj_model['v']
        axis_range = np.max(v, axis=0) - np.min(v, axis=0)
        v = (v - np.mean(v, axis=0)) / max(axis_range)
        obj_model['v'] = v
        mesh = obj_to_triangle_spec(obj_model)
        meshes = {
            'face': mesh['face'].astype(np.float32),
            'normal': mesh['normal'].astype(np.float32)
        }
        sample = {'synset': 0, 'mesh': meshes}
    else:
        # Sample points from the 3D mesh
        v, vn = uniform_sample_mesh(obj_model,
                                    num_samples=self.opt.n_splats)
        # Normalize the vertices
        v = (v - np.mean(v, axis=0)) / (v.max() - v.min())
        # Save the splats
        splats = {
            'pos': v.astype(np.float32),
            'normal': vn.astype(np.float32)
        }
        # Add model and synset to the output dictionary
        sample = {'synset': 0, 'splats': splats}

    # Transform
    if self.transform:
        sample = self.transform(sample)
    return sample
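# The manual face-index offsets above follow a general pattern: when meshes are
# concatenated, each mesh's faces must be shifted by the number of vertices that
# come before it in the combined vertex array. A minimal sketch of that pattern
# (concat_meshes is a hypothetical helper, not part of this codebase):
def concat_meshes(meshes):
    """Concatenate a list of {'v': vertices, 'f': faces} meshes into one."""
    vs, fs, n_verts = [], [], 0
    for m in meshes:
        vs.append(m['v'])
        fs.append(m['f'] + n_verts)  # shift faces by preceding vertex count
        n_verts += m['v'].shape[0]
    return {'v': np.concatenate(vs), 'f': np.concatenate(fs)}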
def __getitem__(self, idx):
    """Get item."""
    # Get object path
    obj_path = os.path.join(self.opt.root_dir, self.samples[idx])

    # Load the foreground and background models once and cache them
    if not self.loaded:
        self.fg_obj = load_model(obj_path)
        self.bg_obj = load_model(self.opt.bg_model)
        self.loaded = True
    obj_model = self.fg_obj
    obj2 = self.bg_obj

    # Center and rescale the foreground mesh to roughly unit extent
    v1 = (obj_model['v'] - obj_model['v'].mean()) / (
        obj_model['v'].max() - obj_model['v'].min())
    v2 = obj2['v']
    scale = (obj2['v'].max() - obj2['v'].min()) * 0.4
    offset = np.array([14.0, 8.0, 12.0])

    if self.opt.only_background:
        v = v2
        f = obj2['f']
    elif self.opt.only_foreground:
        v = v1
        f = obj_model['f']
    else:
        if self.opt.random_rotation:
            random_axis = np_normalize(self.opt.axis)
            random_angle = np.random.rand(1) * np.pi * 2
            M = axis_angle_matrix(axis=random_axis, angle=random_angle)
            M[:3, 3] = offset
            v1 = np.matmul(scale * v1, M.transpose(1, 0)[:3, :3]) + M[:3, 3]
        else:
            v1 = scale * v1 + offset
        # Merge foreground and background, offsetting the background faces
        v = np.concatenate((v1, v2))
        f = np.concatenate((obj_model['f'], obj2['f'] + v1.shape[0]))
    obj_model = {'v': v, 'f': f}

    if self.opt.use_mesh:
        # Normalize the vertices so the largest axis spread is 1
        v = obj_model['v']
        axis_range = np.max(v, axis=0) - np.min(v, axis=0)
        v = (v - np.mean(v, axis=0)) / max(axis_range)
        obj_model['v'] = v
        mesh = obj_to_triangle_spec(obj_model)
        meshes = {
            'face': mesh['face'].astype(np.float32),
            'normal': mesh['normal'].astype(np.float32)
        }
        sample = {'synset': 0, 'mesh': meshes}
    else:
        # Sample points from the 3D mesh
        v, vn = uniform_sample_mesh(obj_model,
                                    num_samples=self.opt.n_splats)
        # Normalize the vertices
        v = (v - np.mean(v, axis=0)) / (v.max() - v.min())
        # Save the splats
        splats = {
            'pos': v.astype(np.float32),
            'normal': vn.astype(np.float32)
        }
        # Add model and synset to the output dictionary
        sample = {'synset': 0, 'splats': splats}

    # Transform
    if self.transform:
        sample = self.transform(sample)
    return sample
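# The __getitem__ methods above presumably belong to torch.utils.data.Dataset
# subclasses, so samples would normally be consumed through a DataLoader.
# A hedged sketch; ObjectsFolderDataset and `opts` are assumed names, not
# confirmed by this file:
def _demo_dataloader(opts):
    from torch.utils.data import DataLoader
    dataset = ObjectsFolderDataset(opt=opts, transform=None)
    loader = DataLoader(dataset, batch_size=4, shuffle=True, num_workers=0)
    sample = next(iter(loader))
    if 'splats' in sample:
        pos = sample['splats']['pos']        # (batch, n_splats, 3)
        normal = sample['splats']['normal']  # (batch, n_splats, 3)
    return sample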