Example no. 1
def main():
    node_list = [
        NodeGene(node_id=0, node_type='source'),
        NodeGene(node_id=1, node_type='source'),
        NodeGene(node_id=2, node_type='output', bias=1),
        NodeGene(node_id=3, node_type='output', bias=1),
        NodeGene(node_id=4, node_type='output', bias=1)
    ]

    connection_list = [
        ConnectionGene(input_node=0,
                       output_node=2,
                       innovation_number=1,
                       weight=-0.351,
                       enabled=True),
        ConnectionGene(input_node=0,
                       output_node=3,
                       innovation_number=2,
                       weight=-0.351,
                       enabled=True),
        ConnectionGene(input_node=0,
                       output_node=4,
                       innovation_number=3,
                       weight=-0.351,
                       enabled=True),
        ConnectionGene(input_node=1,
                       output_node=2,
                       innovation_number=4,
                       weight=-0.351,
                       enabled=True),
        ConnectionGene(input_node=1,
                       output_node=3,
                       innovation_number=5,
                       weight=-0.351,
                       enabled=True),
        ConnectionGene(input_node=1,
                       output_node=4,
                       innovation_number=6,
                       weight=-0.351,
                       enabled=True)
    ]

    genome = GenomeMultiClass(connections=connection_list,
                              nodes=node_list,
                              key=3)
    x_data, y_data = create_data(n_generated=500)
    genome_nn = GenomeNeuralNetwork(genome=genome,
                                    create_weights_bias_from_genome=False,
                                    activation_type='sigmoid',
                                    learning_rate=0.1,
                                    x_train=x_data,
                                    y_train=y_data)
    print(genome.num_layers_including_input)
    print(genome.constant_weight_connections)
    print(genome.layer_connections_dict)
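The snippet above only prints structural diagnostics for the hand-built genome. As a hedged sketch, assuming the directly constructed GenomeNeuralNetwork exposes the same run_one_pass call that get_genome_predictions uses in a later example, the network's raw predictions could also be inspected by appending the following to main():

    # Sketch only: forward-pass the generated data through the genome network.
    # Assumes run_one_pass with return_prediction_only=True returns one row of
    # output per input sample, as in get_genome_predictions below.
    predictions = genome_nn.run_one_pass(
        input_data=x_data, return_prediction_only=True)
    print(predictions[:5])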
Example no. 2
def main():

    # Keep a consistent seed to make debugging easier. TODO: check if this works across files
    np.random.seed(1)

    x_data, y_data = create_data(n_generated=5000)

    neat = NEAT(x_training_data=x_data,
                y_training_data=y_data,
                config=Config,
                fitness_threshold=0.01)

    neat.run(max_num_generations=10000,
             use_backprop=True,
             print_generation_information=True)
def get_genome_predictions(genome, x_data):
    """
    Function to return predictions for a given genome
    :param genome: The genome class instance
    :param x_data:  The data to be predicted on
    :param y_data: The true labels for the data
    :return: the predictions for the given x_data
    """
    # y_data isn't important but it's needed as a parameter
    _, y_data = create_data(n_generated=500)
    genome_nn = NEAT.create_genome_nn(genome=genome,
                                      x_data=x_data,
                                      y_data=y_data)
    return genome_nn.run_one_pass(input_data=x_data,
                                  return_prediction_only=True).round()
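For reference, a minimal usage sketch for get_genome_predictions, assuming a previously evolved genome has been pickled (the path below is hypothetical) and is loaded with initialise_genome, the loader used in the plotting examples further down:

# Hypothetical pickle path, shown only to illustrate the call pattern
genome = initialise_genome(genome_pickle_filepath='pickles/example_genome_pickle')
x_sample, _ = create_data(n_generated=10)
print(get_genome_predictions(genome=genome, x_data=x_sample))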
def main():
    np.random.seed(1)

    # Choose which algorithm is running using keys
    algorithm_options = {0: 'xor_full', 1: 'shm_multi_class'}
    algorithm_running = algorithm_options[1]

    if algorithm_running == algorithm_options[0]:
        num_data_to_generate = 6250

        # Create data
        x_data, y_data = create_data(n_generated=num_data_to_generate,
                                     add_noise=False,
                                     use_one_hot=True)
    elif algorithm_running == algorithm_options[1]:
        # Create data
        x_data, y_data = get_shm_multi_class_data()
        num_data_to_generate = len(x_data)

    # Training data
    training_percentage = 0.8
    training_upper_limit_index = round(num_data_to_generate *
                                       training_percentage)
    x_training = x_data[0:training_upper_limit_index]
    y_training = y_data[0:training_upper_limit_index]

    # Test data
    x_test = x_data[training_upper_limit_index:]
    y_test = y_data[training_upper_limit_index:]

    f1_score_threshold = 0.95 if algorithm_running != algorithm_options[1] else None
    fitness_threshold = -0.1 if algorithm_running != algorithm_options[1] else None
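    # Assumption: leaving both thresholds as None for the shm_multi_class option
    # presumably disables those early-stopping criteria, so the run below is
    # bounded only by max_num_generations.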

    neat = NEATMultiClass(x_training_data=x_training,
                          y_training_data=y_training,
                          x_test_data=x_test,
                          y_test_data=y_test,
                          config=ConfigMultiClass,
                          fitness_threshold=fitness_threshold,
                          f1_score_threshold=f1_score_threshold,
                          algorithm_running=algorithm_running)

    neat.run(max_num_generations=250,
             use_backprop=True,
             print_generation_information=True,
             show_population_weight_distribution=False)
def main():
    # DATA
    x_data, y_data = create_data(n_generated=200, add_noise=True)
    x_circle, y_circle = get_circle_data()
    x_spiral, y_spiral = get_spiral_data()

    # X1, X2 for all datasets
    feature_1_xor = x_data[:, 0]
    feature_2_xor = x_data[:, 1]
    feature_1_circle = x_circle[:, 0]
    feature_2_circle = x_circle[:, 1]
    feature_1_spiral = x_spiral[:, 0]
    feature_2_spiral = x_spiral[:, 1]

    plot_data = False
    show_decision_boundary = False
    visualise_generation = False
    plot_confusion_matrix = False
    visualise_population_complexity = False
    plot_shm_data_figure = True

    font_size = 20
    # PLOT DATA
    if plot_data:
        # TODO: Add legends
        plt.scatter(feature_1_xor,
                    feature_2_xor,
                    color=create_label_colours(labels=y_data))
        plt.title('XOR Data', fontsize=font_size)
        plt.xlabel('X1', fontsize=font_size)
        plt.ylabel('X2', fontsize=font_size)
        plt.tick_params(axis='both', which='major', labelsize=10)
        plt.xticks(fontsize=font_size)
        plt.yticks(fontsize=font_size)
        plt.show()

        fig, ax = plt.subplots()
        label_colours = create_label_colours(labels=y_data)
        x1_reds = []
        x2_reds = []
        x1_greens = []
        x2_greens = []
        for index in range(len(label_colours)):
            if label_colours[index] == 'green':
                x1_greens.append(feature_1_xor[index])
                x2_greens.append(feature_2_xor[index])
            else:
                x1_reds.append(feature_1_xor[index])
                x2_reds.append(feature_2_xor[index])

        ax.scatter(x1_greens,
                   x2_greens,
                   c='green',
                   label='Class 1',
                   alpha=1,
                   edgecolors='none')
        ax.scatter(x1_reds,
                   x2_reds,
                   c='red',
                   label='Class 0',
                   alpha=1,
                   edgecolors='none')
        ax.legend(loc='upper right')
        plt.xlabel('X1')
        plt.ylabel('X2')
        plt.show()

        # plt.scatter(feature_1_circle, feature_2_circle, color=create_label_colours(labels=y_circle))
        # plt.title('Circle Data')
        # plt.xlabel('X1')
        # plt.ylabel('X2')
        # plt.show()
        # plt.scatter(feature_1_spiral, feature_2_spiral, color=create_label_colours(labels=y_spiral))
        # plt.title('Spiral Data')
        # plt.xlabel('X1')
        # plt.ylabel('X2')
        # plt.show()

    if show_decision_boundary:
        # Test genome accuracy
        genome = initialise_genome(
            genome_pickle_filepath='pickles/best_genome_pickle_shm_two_class_618056')
        plot_decision_boundary(genome=genome, data_being_used='shm_two_class')

    if visualise_generation:
        visualise_generation_tracker(
            filepath_to_genome='algorithm_runs/xor_small_noise/run_1/generation_tracker')
    if visualise_population_complexity:
        plot_population_complexity(
            filepath_to_neat_instance='algorithm_runs/xor_small_noise/run_1/NEAT_instance',
            font_size=None)

    if plot_confusion_matrix:
        create_confusion_matrix()
    if plot_shm_data_figure:
        plot_shm_data(rotation_angle=30, elevation=-160)
Example no. 6
def main():
    # Keep a consistent seed to make debugging easier. TODO: check if this works across files
    np.random.seed(1)

    algorithm_options = {
        0: 'xor_full',
        1: 'xor_small_noise',
        2: 'circle_data',
        3: 'shm_two_class',
        4: 'spiral_data'
    }
    # Choose which algorithm is running using keys
    algorithm_running = algorithm_options[3]

    if algorithm_running == algorithm_options[0]:
        num_data_to_generate = 6250

        # Create data
        x_data, y_data = create_data(n_generated=num_data_to_generate,
                                     add_noise=False)
    elif algorithm_running == algorithm_options[1]:
        num_data_to_generate = 300

        # Create data
        x_data, y_data = create_data(n_generated=num_data_to_generate,
                                     add_noise=True)
    elif algorithm_running == algorithm_options[2]:
        x_data, y_data = get_circle_data()
        x_data = x_data[:, 0:2]
        y_data.shape = (len(x_data), 1)
        for row in range(y_data.shape[0]):
            if y_data[row, 0] == -1:
                y_data[row, 0] = 0
        num_data_to_generate = len(x_data)
    elif algorithm_running == algorithm_options[3]:
        x_data, y_data = get_shm_two_class_data()
        num_data_to_generate = len(x_data)
    elif algorithm_running == algorithm_options[4]:
        x_data, y_data = get_spiral_data()
        x_data = x_data[:, 0:2]
        y_data.shape = (len(x_data), 1)
        for row in range(y_data.shape[0]):
            if y_data[row, 0] == -1:
                y_data[row, 0] = 0
        num_data_to_generate = len(x_data)
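    # Aside: the row-by-row relabelling loops in the circle and spiral branches
    # above could be replaced by a single vectorised NumPy assignment, e.g.
    # y_data[y_data == -1] = 0.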

    # Training data
    training_percentage = 0.8
    training_upper_limit_index = round(num_data_to_generate *
                                       training_percentage)
    x_training = x_data[0:training_upper_limit_index]
    y_training = y_data[0:training_upper_limit_index]

    # Test data
    x_test = x_data[training_upper_limit_index:]
    y_test = y_data[training_upper_limit_index:]

    neat = NEAT(x_training_data=x_training,
                y_training_data=y_training,
                x_test_data=x_test,
                y_test_data=y_test,
                config=Config,
                fitness_threshold=-0.000001,
                f1_score_threshold=0.95,
                algorithm_running=algorithm_running)

    start_evaluate_time = time.time()
    neat.run(max_num_generations=10000,
             use_backprop=True,
             print_generation_information=True,
             show_population_weight_distribution=False)
    end_evaluate_time = time.time()
    total_time = end_evaluate_time - start_evaluate_time
    print(total_time)
def main():
    # DATA
    x_data, y_data = create_data(n_generated=200, add_noise=True)
    x_circle, y_circle = get_circle_data()
    x_spiral, y_spiral = get_spiral_data()

    # X1, X2 for all datasets
    feature_1_xor = x_data[:, 0]
    feature_2_xor = x_data[:, 1]
    feature_1_circle = x_circle[:, 0]
    feature_2_circle = x_circle[:, 1]
    feature_1_spiral = x_spiral[:, 0]
    feature_2_spiral = x_spiral[:, 1]

    plot_data = False
    show_decision_boundary = False
    visualise_generation = False
    visualise_population_complexity = False
    get_table_values = False
    plot_confusion_matrix = False
    plot_figure_shm_data = True
    plot_figure_model_complexity_during_evolution = False
    plot_figure_shm_multi = False
    # experiment_path = 'algorithm_runs\\xor_small_noise'
    experiment_path = 'algorithm_runs\\shm_two_class'
    # experiment_path = 'algorithm_runs_multi\\shm_multi_class'

    # PLOT DATA
    if plot_data:
        # TODO: Add legends
        colors = create_label_colours(labels=y_data)
        fig, ax = plt.subplots()
        for x1, x2, color in zip(feature_1_xor, feature_2_xor, colors):
            # Colour each point by its class label; label alone does not set the colour
            ax.scatter(x1, x2, c=color, label=color)
        plt.title('XOR Data')
        plt.xlabel('X1')
        plt.ylabel('X2')
        # Collapse duplicate labels so the legend shows one entry per class
        handles, labels = ax.get_legend_handles_labels()
        unique_labels = dict(zip(labels, handles))
        ax.legend(unique_labels.values(), unique_labels.keys())
        plt.show()
        plt.scatter(feature_1_circle,
                    feature_2_circle,
                    color=create_label_colours(labels=y_circle))
        plt.title('Circle Data')
        plt.xlabel('X1')
        plt.ylabel('X2')
        plt.show()
        plt.scatter(feature_1_spiral,
                    feature_2_spiral,
                    color=create_label_colours(labels=y_spiral))
        plt.title('Spiral Data')
        plt.xlabel('X1')
        plt.ylabel('X2')
        plt.show()

    if show_decision_boundary:
        # Test genome accuracy
        plot_decision_boundary(experiments_path=experiment_path,
                               data_being_used='xor_data')

    if visualise_generation:
        visualise_generation_tracker(experiments_path=experiment_path)
    if visualise_population_complexity:
        plot_population_complexity(experiments_path=experiment_path)

    if plot_confusion_matrix:
        create_confusion_matrix(x_data=x_data,
                                y_data=y_data,
                                experiments_path=experiment_path)

    if plot_figure_model_complexity_during_evolution:
        plot_model_complexity_during_evolution(
            experiments_path=experiment_path)

    if get_table_values:
        get_avg_table_values(experiments_path=experiment_path)

    if plot_figure_shm_data:
        plot_shm_data(rotation_angle=30,
                      elevation=-160,
                      experiments_path='algorithm_runs/shm_two_class')

    if plot_figure_shm_multi:
        plot_shm_multi_data(
            experiments_path='algorithm_runs_multi/shm_multi_class')