# Log the hyperparameter combination being evaluated in this grid-search step.
# NOTE(review): this first line is dedented relative to the two below it —
# the indentation looks mangled by copy/paste; confirm against the original.
print("learning_rate: {}".format(learning_rate))
        print("lambda_loss_amount: {}".format(lambda_loss_amount))
        print("")

        class EditedConfig(Config):
            """Config subclass that overrides only the swept hyperparameters.

            Everything not listed in the overrides below keeps the value
            set by the base ``Config.__init__``.
            """

            def __init__(self, X, Y):
                super(EditedConfig, self).__init__(X, Y)
                # Apply the grid-searched overrides captured from the
                # enclosing scope (learning + architecture parameters).
                overrides = (
                    ('learning_rate', learning_rate),
                    ('lambda_loss_amount', lambda_loss_amount),
                    # Architecture params:
                    ('n_layers_in_highway', n_layers_in_highway),
                    ('n_stacked_layers', n_stacked_layers),
                )
                for attr_name, attr_value in overrides:
                    setattr(self, attr_name, attr_value)

        # Train and evaluate with the edited config.
        accuracy_out, best_accuracy, f1_score_out, best_f1_score = run_with_config(
            EditedConfig, X_train, y_train, X_test, y_test)
        print(accuracy_out, best_accuracy, f1_score_out, best_f1_score)

        # Append a header plus one tab-separated result row to the trial's log.
        with open('{}_result_opportunity_18.txt'.format(trial_name), 'a') as f:
            # BUG FIX: the original wrote the *source code* of the row
            # expression as a literal string (a quoted "str(learning_rate)+..."
            # blob); write actual column names for the header instead.
            f.write(
                'learning_rate \tlambda_loss_amount \taccuracy_out \t'
                'best_accuracy \tf1_score_out \tbest_f1_score\n'
            )
            f.write(
                str(learning_rate) + ' \t' + str(lambda_loss_amount) + ' \t' +
                str(accuracy_out) + ' \t' + str(best_accuracy) + ' \t' +
                str(f1_score_out) + ' \t' + str(best_f1_score) + '\n\n')

        # Visual separator between hyperparameter combinations; the final
        # "Done." prints after the enclosing (not shown) loops finish.
        print("________________________________________________________")
    print("")
print("Done.")
# ---- Example #2 (separate grid-search script fragment; the "0" below was a
# scrape artifact) ----
# 0
                    super(EditedConfig, self).__init__(X, Y)

                    # Edit only some parameters:
                    self.learning_rate = learning_rate
                    self.lambda_loss_amount = lambda_loss_amount  # regularization penalty; keep it small to help prevent overfitting
                    self.clip_gradients = clip_gradients
                    # Architecture params:
                    self.n_layers_in_highway = n_layers_in_highway
                    self.n_stacked_layers = n_stacked_layers

            # # Useful catch upon looping (e.g.: not enough memory)
            # try:
            #     accuracy_out, best_accuracy = run_with_config(EditedConfig)
            # except:
            #     accuracy_out, best_accuracy = -1, -1
            # Train and evaluate with the edited config.
            accuracy_out, best_accuracy, f1_score_out, best_f1_score = (
                run_with_config(EditedConfig, X_train, y_train, X_test,
                                y_test))
            print(accuracy_out, best_accuracy, f1_score_out, best_f1_score)

            # Append one tab-separated result row to the trial's log file.
            with open('{}_result_HAR_6.txt'.format(trial_name), 'a') as f:
                f.write(
                    str(learning_rate) + ' \t' + str(lambda_loss_amount) +
                    ' \t' + str(clip_gradients) + ' \t' + str(accuracy_out) +
                    ' \t' + str(best_accuracy) + ' \t' + str(f1_score_out) +
                    ' \t' + str(best_f1_score) + '\n\n')

            # Visual separator between hyperparameter combinations.
            print("________________________________________________________")
        print("")
print("Done.")
                    super(EditedConfig, self).__init__(X, Y)

                    # Edit only some parameters:
                    self.learning_rate = learning_rate
                    self.lambda_loss_amount = lambda_loss_amount
                    # Gradient clipping is explicitly disabled in this variant.
                    self.clip_gradients = None
                    # Architecture params:
                    self.n_layers_in_highway = n_layers_in_highway
                    self.n_stacked_layers = n_stacked_layers

            # # Useful catch upon looping (e.g.: not enough memory)
            # try:
            #     accuracy_out, best_accuracy = run_with_config(EditedConfig)
            # except:
            #     accuracy_out, best_accuracy = -1, -1
            accuracy_out, best_accuracy, f1_score_out, best_f1_score = (
                run_with_config(EditedConfig, X_train, y_train, X_test, y_test,
                                learning_rate))
            print(accuracy_out, best_accuracy, f1_score_out, best_f1_score)

            with open('{}_result_emotion_2.txt'.format(trial_name), 'a') as f:
                f.write(
                    str(learning_rate) + ' \t' + str(lambda_loss_amount) +
                    ' \t' + str(clip_gradients) + ' \t' + str(accuracy_out) +
                    ' \t' + str(best_accuracy) + ' \t' + str(f1_score_out) +
                    ' \t' + str(best_f1_score) + '\n\n')

            print "________________________________________________________"
        print ""
print "Done."