def train_validation_set_2(model_input=None, model_output=None, epochs=50):
    """Train on the naive-augmented validation-set-2 data.

    Loads validation set 2 as the held-out set and its naive-augmented
    variant as the training set, then runs training.

    :param model_input: path/identifier of a model to resume from (None = fresh).
    :param model_output: path/identifier to write the trained model to.
    :param epochs: number of training epochs (default 50, the original value).
    """
    data_v = DataSources.load_validation_dataset2()
    data = DataSources.load_naive_augmented_dataset(DataSources.DataSources.VALIDATION_SET2_NG)
    run_train(data, data_v, model_input=model_input, model_output=model_output, epochs=epochs)
def train_300w_3d_helen_naive_augmentations(data_sources: [DataSources.DataSources], model_input, model_output,
                                            limit=-1, epochs=30):
    """Train on one or more naive-augmented 300W-3D-Helen data sources.

    Concatenates the samples from every source in *data_sources*; when
    *limit* > -1 the combined set is shuffled and truncated to *limit*
    samples (each individual load is also capped at *limit*).
    Validation data is validation set 2 with poses recalculated.

    :param data_sources: data-source identifiers to load and concatenate.
    :param model_input: path/identifier of a model to resume from.
    :param model_output: path/identifier to write the trained model to.
    :param limit: max number of samples to train on; -1 means no limit.
    :param epochs: number of training epochs (default 30, the original value).
    """
    log.info('train_300w_3d_helen_naive_augmentations::')
    data_v: [Data] = DataSources.load_validation_dataset2(recalc_pose=True)
    data: [Data] = []
    for data_source in data_sources:
        data += DataSources.load_naive_augmented_dataset(data_source, limit=limit)
    if limit > -1:
        # Shuffle before truncating so the kept subset is drawn across all sources.
        np.random.shuffle(data)
        data = data[:limit]
    run_train(data, data_v, model_input=model_input, model_output=model_output, epochs=epochs)
def validate_pose_vs_landmarks():
    """Report the mean angular error between stored poses and PnP-recovered poses.

    For every sample in validation set 2, converts the stored pose's rotation
    vector to a matrix, re-solves the pose from the 2D landmarks via PnP
    against the reference face model, and accumulates the angle between the
    two rotations. Prints the mean angle in degrees.
    """
    import DataSources
    import GenerateTrainingSet

    # NOTE(review): the original also loaded VALIDATION_SET2_NG here and
    # immediately overwrote it — dead code (presumably a leftover toggle
    # between datasets), removed.
    data = DataSources.load_validation_dataset2(
        DataSources.DataSources.VALIDATION_2, recalc_pose=False)
    if not data:
        print('validate_pose_vs_landmarks: no validation data loaded')
        return

    # The face model is loop-invariant; fetch it once instead of per sample.
    face_model = GenerateTrainingSet.get_face_model()

    total_theta = 0
    for data_ in data:
        # Stored pose: first three components are the rotation vector.
        rot_mat_orig, _ = cv2.Rodrigues(data_.pose[:3])
        # Pose recovered from the 2D landmarks via PnP.
        rotation_vecs, translation_vecs = GenerateTrainingSet.solve_pnp(
            data_.landmarks_2d, face_model.model_TD, face_model)
        rot_mat_land, _ = cv2.Rodrigues(rotation_vecs)
        theta = Utils.get_theta_between_rot_mats(rot_mat_orig, rot_mat_land)
        total_theta += theta

    print(np.rad2deg(total_theta / len(data)))
def test_300w_3d_helen1(model_input=None, limit=-1, is_6pos=False, model_name='c2_net'):
    """Run prediction over the 300W-3D-Helen naive-augmented set 1.

    :param model_input: path/identifier of the model to evaluate.
    :param limit: max number of samples to load; -1 means no limit.
    :param is_6pos: passed through to predict() — presumably selects a
        6-DoF pose output head; TODO confirm against predict().
    :param model_name: model architecture name passed to predict().
    """
    # Fixed: log message previously said 'run_validation_set2::' — a
    # copy-paste leftover that misidentified this function in the logs.
    log.info('test_300w_3d_helen1::')
    data: [Data] = DataSources.load_naive_augmented_dataset(DataSources.DataSources._300W_3D_HELEN_NG1, limit=limit)
    predict(data, model_input, limit, is_6pos, model_name=model_name)