def train_validation_set_2(model_input=None, model_output=None):
    """Train on the naive-augmented validation-set-2 data, validating against
    the un-augmented validation set 2.

    :param model_input: optional model to start training from (passed through
        to ``run_train``); ``None`` presumably means train from scratch
    :param model_output: optional destination for the trained model (passed
        through to ``run_train``)
    """
    validation_data = DataSources.load_validation_dataset2()
    training_data = DataSources.load_naive_augmented_dataset(
        DataSources.DataSources.VALIDATION_SET2_NG)
    run_train(training_data, validation_data,
              model_input=model_input, model_output=model_output, epochs=50)
def train_300w_3d_helen_naive_augmentations(data_sources: [DataSources.DataSources], model_input, model_output, limit=-1):
    """Train on naive-augmented samples pooled from several data sources,
    validating against validation set 2 (with recalculated poses).

    :param data_sources: data sources whose naive augmentations are pooled
        into one training set
    :param model_input: model to start training from (passed to ``run_train``)
    :param model_output: destination for the trained model (passed to ``run_train``)
    :param limit: when > -1, shuffle the pooled samples and keep at most this
        many; also forwarded to each per-source load
    """
    log.info('train_300w_3d_helen_naive_augmentations::')
    validation_data: [Data] = DataSources.load_validation_dataset2(recalc_pose=True)

    # Pool the augmented samples from every requested source.
    pooled: [Data] = []
    for source in data_sources:
        pooled.extend(DataSources.load_naive_augmented_dataset(source, limit=limit))

    # Optionally cap the pooled set; shuffle first so the cut is unbiased.
    if limit > -1:
        np.random.shuffle(pooled)
        pooled = pooled[:limit]

    run_train(pooled, validation_data,
              model_input=model_input, model_output=model_output, epochs=30)
def validate_load_image():
    """Sanity-check that our own image loading/preprocessing pipeline can be
    compared against keras' ``image.load_img`` path on one sample.

    Loads the first validation-set-2 sample, runs it through ``load_image`` +
    ``preprocess_input``, then loads the same file with keras utilities and
    preprocesses it the same way. Purely diagnostic: prints 'done' at the end.
    """
    import DataSources
    from detect_face import DetectFace
    from keras.preprocessing import image
    # FIX: this import was commented out while preprocess_input is used twice
    # below, which raises NameError unless the module happens to import it at
    # top level (not visible from here — harmless either way).
    from keras.applications.resnet50 import preprocess_input

    data = DataSources.load_validation_dataset2()
    data: [Data] = DetectFace.get_face_bboxes(data[:1])

    # Our pipeline: project loader + tf-mode preprocessing.
    image_array = load_image(data[0])
    image_array = preprocess_input(image_array, mode='tf')

    # Reference pipeline: keras loader + identical preprocessing.
    img = image.load_img(data[0].image, target_size=(224, 224))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)
    x = preprocess_input(x, mode='tf')

    print('done')
def validate_pose_vs_landmarks():
    """Measure agreement between the stored pose and the pose recovered from
    2D landmarks via PnP, printing the mean angular error in degrees.

    For each validation sample: build the rotation matrix from the stored
    pose vector, solve PnP from the sample's 2D landmarks against the face
    model, and accumulate the angle between the two rotation matrices.
    """
    import DataSources
    import GenerateTrainingSet

    # FIX: the original first loaded VALIDATION_SET2_NG and immediately
    # overwrote the result with the line below — that dead load is removed.
    data = DataSources.load_validation_dataset2(
        DataSources.DataSources.VALIDATION_2, recalc_pose=False)

    # FIX: guard against an empty dataset (the final mean would divide by 0).
    if not data:
        print('validate_pose_vs_landmarks: no data loaded')
        return

    # FIX: hoisted out of the loop — get_face_model() takes no per-sample
    # arguments, so it is loop-invariant.
    face_model = GenerateTrainingSet.get_face_model()

    total_theta = 0
    for data_ in data:
        # Rotation from the stored ground-truth pose (first 3 components).
        rot_mat_orig, _ = cv2.Rodrigues(data_.pose[:3])
        # Rotation recovered from the 2D landmarks via PnP.
        rotation_vecs, translation_vecs = GenerateTrainingSet.solve_pnp(
            data_.landmarks_2d, face_model.model_TD, face_model)
        rot_mat_land, _ = cv2.Rodrigues(rotation_vecs)
        total_theta += Utils.get_theta_between_rot_mats(rot_mat_orig, rot_mat_land)

    print(np.rad2deg(total_theta / len(data)))
def naive_augment_validation_set2():
    """Generate naive augmentations for validation set 2 and write them under
    the '../augmented/validation_set2' folder.
    """
    samples: [Data] = DataSources.load_validation_dataset2()
    gen_naive_augmentations(samples, '../augmented/validation_set2')
def run_validation_set2(model_input=None, limit=-1, is_6pos=False, model_name='c2_net'):
    """Run pose prediction over validation set 2.

    :param model_input: optional model to predict with (forwarded to ``predict``)
    :param limit: forwarded to ``predict``; -1 presumably means no limit
    :param is_6pos: forwarded to ``predict``
    :param model_name: network architecture name, forwarded to ``predict``
    """
    log.info('run_validation_set2::')
    samples: [Data] = DataSources.load_validation_dataset2()
    predict(samples, model_input, limit, is_6pos, model_name=model_name)