Example #1
0
    def create_pca(self, dataset_name, pca_postfix):
        """Build a PCA basis from the training labels and persist it.

        Reads label and pose vectors from the dataset's TFRecord, runs PCA
        on the label vectors, and saves the eigenvalues, eigenvectors
        (transposed so columns are principal axes), and the mean label
        vector. Also saves the per-component min/max of the pose vectors
        (used elsewhere for pose normalization — presumably; confirm with
        callers).

        Args:
            dataset_name: one of the DatasetName constants. Only
                DatasetName.ibug is wired up here.
            pca_postfix: suffix appended to every saved object name so
                multiple PCA variants can coexist.

        Raises:
            ValueError: if dataset_name is not supported — previously this
                fell through with empty arrays and crashed much later with
                an opaque `min() arg is an empty sequence`.
        """
        tf_record_util = TFRecordUtility()

        if dataset_name == DatasetName.ibug:
            lbl_arr, img_arr, pose_arr = tf_record_util.retrieve_tf_record(
                IbugConf.tf_train_path,
                IbugConf.sum_of_train_samples,
                only_label=True,
                only_pose=True)
        else:
            # Fail fast instead of proceeding with undefined/empty data.
            raise ValueError(
                'create_pca: unsupported dataset_name %r' % (dataset_name,))
        lbl_arr = np.array(lbl_arr)

        print('PCA-retrieved')
        '''need to be normalized based on the hyper face paper?'''

        reduced_lbl_arr, eigenvalues, eigenvectors = self.__func_PCA(
            lbl_arr, pca_postfix)
        mean_lbl_arr = np.mean(lbl_arr, axis=0)
        # Transpose so each column of the stored matrix is one principal axis.
        eigenvectors = eigenvectors.T

        self.__save_obj(
            eigenvalues,
            dataset_name + self.__eigenvalues_prefix + str(pca_postfix))
        self.__save_obj(
            eigenvectors,
            dataset_name + self.__eigenvectors_prefix + str(pca_postfix))
        self.__save_obj(
            mean_lbl_arr,
            dataset_name + self.__meanvector_prefix + str(pca_postfix))
        '''calculate pose min max'''
        # Unpack the three pose components column-wise instead of the old
        # manual append loop.
        p_1_arr, p_2_arr, p_3_arr = zip(*pose_arr)

        self.__save_obj(min(p_1_arr), 'p_1_min')
        self.__save_obj(max(p_1_arr), 'p_1_max')

        self.__save_obj(min(p_2_arr), 'p_2_min')
        self.__save_obj(max(p_2_arr), 'p_2_max')

        self.__save_obj(min(p_3_arr), 'p_3_min')
        self.__save_obj(max(p_3_arr), 'p_3_max')

        print('PCA-->done')
Example #2
0
    def test_pca_validity(self, pca_postfix):
        """Visual sanity check of a stored PCA basis.

        Loads the saved PCA objects for the ibug dataset, then for the
        first 20 training samples projects the label vector into the PCA
        space (b-vector) and reconstructs it. Prints the ground-truth
        landmarks (index i) and the PCA-reconstructed landmarks
        (index i * 1000, to keep output names distinct) over the image.

        Args:
            pca_postfix: suffix identifying which saved PCA variant to load.
        """
        # NOTE(review): the original constructed an unused CNNModel() here;
        # removed as dead code.
        pca_utility = PCAUtility()
        tf_record_utility = TFRecordUtility()
        image_utility = ImageUtility()

        eigenvalues, eigenvectors, meanvector = pca_utility.load_pca_obj(
            dataset_name=DatasetName.ibug, pca_postfix=pca_postfix)

        lbl_arr, img_arr, pose_arr = tf_record_utility.retrieve_tf_record(
            tfrecord_filename=IbugConf.tf_train_path,
            number_of_records=30,
            only_label=False)
        for i in range(20):
            # Project onto the PCA basis, then reconstruct the label vector
            # from the b-vector: lbl ≈ mean + V·b.
            b_vector_p = self.calculate_b_vector(lbl_arr[i], True, eigenvalues,
                                                 eigenvectors, meanvector)
            lbl_new = meanvector + np.dot(eigenvectors, b_vector_p)

            # Denormalize to 224x224 pixel coordinates (center 112,112).
            labels_true_transformed, landmark_arr_x_t, landmark_arr_y_t = image_utility. \
                create_landmarks_from_normalized(lbl_arr[i], 224, 224, 112, 112)

            labels_true_transformed_pca, landmark_arr_x_pca, landmark_arr_y_pca = image_utility. \
                create_landmarks_from_normalized(lbl_new, 224, 224, 112, 112)

            image_utility.print_image_arr(i, img_arr[i], landmark_arr_x_t,
                                          landmark_arr_y_t)
            image_utility.print_image_arr(i * 1000, img_arr[i],
                                          landmark_arr_x_pca,
                                          landmark_arr_y_pca)