Example #1
    def predict(self,
                test_x,
                out_scaler,
                gen_test_file_list,
                sequential_training=False,
                stateful=False):
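        """Generate features for every utterance in gen_test_file_list:
        reshape the input for the model type, run the network, pass the
        output through data_utils.denorm_data and write one binary file
        per utterance."""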
        #### compute predictions ####
        io_funcs = BinaryIOCollection()

        test_file_number = len(gen_test_file_list)
        print("generating features on held-out test data...")
        for utt_index in range(test_file_number):
            gen_test_file_name = gen_test_file_list[utt_index]
            test_id = os.path.splitext(os.path.basename(gen_test_file_name))[0]
            temp_test_x = test_x[test_id]
            num_of_rows = temp_test_x.shape[0]

            # Reshape the input for the model type: stateful models get it
            # re-chunked via get_stateful_input, other sequence models take
            # the whole utterance as one batch of shape (1, frames, n_in).
            if stateful:
                temp_test_x = data_utils.get_stateful_input(
                    temp_test_x, self.seq_length, self.batch_size)
            elif sequential_training:
                temp_test_x = np.reshape(temp_test_x,
                                         (1, num_of_rows, self.n_in))

            predictions = self.model.predict(temp_test_x)
            # Sequence models return (1, frames, n_out); flatten back to a
            # frame-level (frames, n_out) matrix.
            if sequential_training:
                predictions = np.reshape(predictions,
                                         (num_of_rows, self.n_out))

            # De-normalise the network output before writing it out.
            data_utils.denorm_data(predictions, out_scaler)

            io_funcs.array_to_binary_file(predictions, gen_test_file_name)
            data_utils.drawProgressBar(utt_index + 1, test_file_number)

        sys.stdout.write("\n")
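
The only shape handling above is the round trip between a per-frame feature matrix and the single-batch layout that sequence models expect. The sketch below shows that round trip in isolation; num_of_rows, n_in and n_out mirror the attributes used in the method, and the concrete sizes are made up for illustration.

    import numpy as np

    # Hypothetical utterance length and feature sizes.
    num_of_rows, n_in, n_out = 120, 425, 187

    frame_features = np.random.rand(num_of_rows, n_in).astype(np.float32)

    # Sequence models see the whole utterance as one batch:
    # (frames, n_in) -> (1, frames, n_in).
    rnn_input = np.reshape(frame_features, (1, num_of_rows, n_in))

    # Their output comes back as (1, frames, n_out) and is flattened to
    # frame-level predictions before being written out.
    rnn_output = np.zeros((1, num_of_rows, n_out), dtype=np.float32)
    frame_predictions = np.reshape(rnn_output, (num_of_rows, n_out))
    assert frame_predictions.shape == (num_of_rows, n_out)
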
Example #2
    def get_validation_error(self,
                             valid_x,
                             valid_y,
                             sequential_training=True,
                             stateful=False):
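        """Return the validation error: the frame-averaged sum of squared
        prediction errors per utterance, averaged over all utterances."""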
        valid_id_list = list(valid_x.keys())
        valid_id_list.sort()

        valid_error = 0.0
        valid_file_number = len(valid_id_list)
        for utt_index in range(valid_file_number):
            temp_valid_x = valid_x[valid_id_list[utt_index]]
            temp_valid_y = valid_y[valid_id_list[utt_index]]
            num_of_rows = temp_valid_x.shape[0]

            # Same input reshaping as in predict(): chunked input for
            # stateful models, a single (1, frames, n_in) batch for
            # sequence models.
            if stateful:
                temp_valid_x = data_utils.get_stateful_input(
                    temp_valid_x, self.seq_length, self.batch_size)
            elif sequential_training:
                temp_valid_x = np.reshape(temp_valid_x,
                                          (1, num_of_rows, self.n_in))

            predictions = self.model.predict(temp_valid_x)
            if sequential_training:
                predictions = np.reshape(predictions,
                                         (num_of_rows, self.n_out))

            # Per-utterance error: sum of squared errors over output
            # dimensions, averaged over frames.
            valid_error += np.mean(
                np.sum((predictions - temp_valid_y)**2, axis=1))

        # Average the per-utterance errors over the validation set.
        valid_error = valid_error / valid_file_number

        return valid_error
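
The quantity accumulated inside the loop is, for each utterance, the sum of squared errors over the output dimensions averaged over frames; the final division then averages those per-utterance errors. A tiny worked example with made-up numbers:

    import numpy as np

    # Two frames, two output dimensions (made-up values).
    predictions = np.array([[1.0, 2.0],
                            [3.0, 4.0]])
    targets     = np.array([[1.5, 2.0],
                            [2.0, 4.5]])

    # Per-frame sums of squared errors are [0.25, 1.25], so the
    # utterance error is their mean, 0.75.
    utt_error = np.mean(np.sum((predictions - targets) ** 2, axis=1))
    print(utt_error)  # 0.75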