Example #1
 def init_population(self,
                     training_data,
                     num_inputs,
                     num_outputs,
                     num_samples,
                     min_weight=-1,
                     max_weight=1,
                     min_hidden=5,
                     max_hidden=15,
                     min_layer=2,
                     max_layer=5,
                     input_type=None,
                     output_type=None,
                     type_init='random',
                     mut_chance=20,
                     mutate_data=True):
     """
     Returns a list of neural networks with randomized features
     :param training_data: Inputs and outputs of the training data
     :param num_inputs: Number of inputs in the neural network
     :param num_outputs: Number of outputs in the neural network
     :param num_samples: Number of samples from the training data to use (reduces amount of computation)
     :param min_weight: Min weight that can be initialized
     :param max_weight: Max weight that can be initialized
     :param min_hidden: Min number of hidden nodes in a given layer
     :param max_hidden: Max number of hidden nodes in a given layer
     :param min_layer: Min number of hidden layers
     :param max_layer: Max number of hidden layers
     :param input_type: 'raw_input', or a type of fuzzy input
     :param output_type: 'raw_output', or a type of fuzzy output
     :param type_init: 'select' or 'random'
     :param mut_chance: Mutation chance for each individual weight
     :param mutate_data: Option to mutate data from raw to fuzzy
     :return: List of neural networks
     """
     return [
         ForwardNet(training_data=training_data,
                    weight_init='random',
                    layer_counts=ForwardNet.init_hidden_layers(
                        num_inputs,
                        num_outputs,
                        min_hidden=min_hidden,
                        max_hidden=max_hidden,
                        min_layer=min_layer,
                        max_layer=max_layer),
                    min_weight=min_weight,
                    max_weight=max_weight,
                    num_samples=num_samples,
                    input_type=input_type,
                    output_type=output_type,
                    type_init=type_init,
                    mut_chance=mut_chance,
                    mutate_data=mutate_data)
         for _ in range(self.init_population_size)
     ]
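As a rough mental model of the comprehension above: each individual gets its own randomized hidden-layer topology from ForwardNet.init_hidden_layers. The stripped-down sketch below (a hypothetical stand-in, not EvoComp's actual code) shows the kind of layer-count lists that pattern produces.

import random

# Hypothetical stand-in for ForwardNet.init_hidden_layers: pick a random number
# of hidden layers, each with a random node count inside the given bounds
def random_layer_counts(num_inputs, num_outputs, min_hidden=5, max_hidden=15,
                        min_layer=2, max_layer=5):
    hidden = [random.randint(min_hidden, max_hidden)
              for _ in range(random.randint(min_layer, max_layer))]
    return [num_inputs] + hidden + [num_outputs]

population_topologies = [random_layer_counts(22, 13) for _ in range(50)]
print(population_topologies[0])  # e.g. [22, 9, 14, 7, 13]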
Example #2
        def cross_weights(weights1, weights2, trade1, trade2, layer_counts,
                          type_weight):
            # Switch the weights where they were cut off
            new_weights = weights1[:trade1] + weights2[trade2:]

            # Regenerate the weights or biases at the location where they were cut off to ensure they're the correct
            # size
            if type_weight == 'weight':
                new_weights[trade1 - 1] = ForwardNet.init_weights_random(
                    layer_counts[trade1], layer_counts[trade1 - 1])
            elif type_weight == 'bias':
                new_weights[trade1 - 1] = ForwardNet.init_weights_random(
                    layer_counts[trade1], 1)

            return new_weights
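To make the splice on the first line concrete, here is a toy trace with placeholder strings standing in for weight matrices (the names are illustrative only). It also shows why the seam entry is regenerated: the matrix at index trade1 - 1 generally no longer matches the layer sizes on either side of the cut.

w1 = ['W1_0', 'W1_1', 'W1_2']           # parent 1: one matrix per layer transition
w2 = ['W2_0', 'W2_1', 'W2_2', 'W2_3']   # parent 2
trade1, trade2 = 2, 1
child = w1[:trade1] + w2[trade2:]
print(child)  # ['W1_0', 'W1_1', 'W2_1', 'W2_2', 'W2_3']
# child[trade1 - 1] ('W1_1' here) sits at the seam, so cross_weights would
# re-initialize it to shape layer_counts[trade1] x layer_counts[trade1 - 1]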
Example #3
def binary_wine():
    """
    Loads crisp data, fuzzifies it, and runs evolutionary computation (EC) to search for the best network
    :return: None
    """
    training_file = 'binary_wine_inputs.csv'  # 22 inputs, 13 outputs

    num_inputs = 22
    num_outputs = 13

    training_data = ForwardNet.load_and_fuzzify_training_data(training_file, num_inputs)
    random.shuffle(training_data)  # Shuffle the training data to keep results useful
    num_samples = 750  # Number of samples from the training data to use (reduces amount of computation)
    init_population_size = 50  # Number of parents to start out with
    selection_size = 50  # Number of survivors between the parents and the offspring (mu + lambda)
    generations = 15  # Iterations

    min_weight = -1  # Min value for weights and biases
    max_weight = 1  # Max value for weights and biases
    min_hidden = 5  # Min number of hidden nodes in a layer
    max_hidden = 10  # Max number of hidden nodes in a layer
    min_layer = 1  # Min number of layers
    max_layer = 3  # Max number of layers
    mut_chance = 50  # Mutation chance for each individual weight

    ec = EvoComp(init_population_size=init_population_size, selection_size=selection_size, generations=generations,
                 training_data=training_data, num_samples=num_samples, num_inputs=num_inputs, num_outputs=num_outputs,
                 min_weight=min_weight, max_weight=max_weight, min_hidden=min_hidden, max_hidden=max_hidden,
                 min_layer=min_layer, max_layer=max_layer, input_type=None, output_type=None,
                 type_init='random', mut_chance=mut_chance)

    ec.evolutionary_computation()
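The selection_size comment above refers to (mu + lambda) selection: parents and offspring compete in one pool and only the fittest survive. A minimal sketch of that scheme, assuming a fitness function where lower is better; the names are illustrative, not EvoComp's actual internals.

def mu_plus_lambda(parents, offspring, selection_size, fitness):
    # Parents compete alongside offspring, so a strong parent is never lost
    pool = parents + offspring
    return sorted(pool, key=fitness)[:selection_size]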
Example #4
def cross_data():
    """
    Loads crisp data, runs evolutionary computation (EC) to search for the best network, and plots the network's
    decision regions
    :return: None
    """
    training_file = 'cross_data (3 inputs - 2 outputs).csv'  # 3 inputs, 2 outputs

    num_inputs = 3
    num_outputs = 2

    training_data = ForwardNet.load_training_data(training_file, num_inputs)
    random.shuffle(training_data)  # Shuffle the training data to keep results useful
    num_samples = None  # Number of samples from the training data to use (reduces amount of computation)
    population_size = 50  # Number of parents to start out with
    selection_size = 50  # Number of survivors between the parents and the offspring (mu + lambda)
    generations = 15  # Iterations

    min_weight = -1  # Min value for weights and biases
    max_weight = 1  # Max value for weights and biases
    min_hidden = 5  # Min number of hidden nodes in a layer
    max_hidden = 10  # Max number of hidden nodes in a layer
    min_layer = 1  # Min number of layers
    max_layer = 3  # Max number of layers
    mut_chance = 50  # Mutation chance for each individual weight

    ec = EvoComp(init_population_size=population_size, selection_size=selection_size, generations=generations,
                 training_data=training_data, num_samples=num_samples, num_inputs=num_inputs, num_outputs=num_outputs,
                 min_weight=min_weight, max_weight=max_weight, min_hidden=min_hidden, max_hidden=max_hidden,
                 min_layer=min_layer, max_layer=max_layer, input_type='raw_input', output_type='raw_output',
                 type_init='select', mut_chance=mut_chance, mutate_data=False)

    ec.evolutionary_computation()

    coordinate_x = []
    coordinate_y = []
    colors = []
    precision = 0.001  # Concentration of dots on graph
    multiplier = int(1 / (precision * 100))
    for i in range(42 * multiplier + 1):  # -2.1 to +2.1 on graph
        for j in range(42 * multiplier + 1):  # -2.1 to +2.1 on graph
            num1 = (float(i) / (10 * multiplier)) - 2.1
            num2 = (float(j) / (10 * multiplier)) - 2.1
            coordinate_x.append(num1)
            coordinate_y.append(num2)
            coordinate = [num1, num2, 0]  # 3rd coordinate is 0 because it doesn't alter output
            network_input = {'raw_input': coordinate}  # How ForwardNet takes an input into neural network
            outputs = ec.best_nn.forward_computation(network_input)  # Outputs[-1] is output layer's outputs
            colors.append('r' if outputs[-1].index(max(outputs[-1])) == 0 else 'b')  # Red if class 1, else blue

    plt.scatter(coordinate_x, coordinate_y, s=10, c=colors, alpha=0.5)
    plt.show()
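A quick sanity check of the grid arithmetic above (my own verification, not from the source): with precision = 0.001 the multiplier works out to 10, so each axis is sampled from -2.1 to +2.1 in steps of 0.01, giving 421 x 421 classified points.

precision = 0.001
multiplier = int(1 / (precision * 100))  # -> 10
xs = [i / (10 * multiplier) - 2.1 for i in range(42 * multiplier + 1)]
assert xs[0] == -2.1 and abs(xs[-1] - 2.1) < 1e-9 and len(xs) == 421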
Example #5
def binary_wine():
    """
    Loads crisp data, fuzzifies it, runs evolutionary computation (EC) to search for the best network, and then
    recommends wines from a short preference survey
    :return: None
    """
    training_file = 'binary_wine_inputs.csv'  # 22 inputs, 13 outputs

    num_inputs = 22
    num_outputs = 13

    training_data = ForwardNet.load_and_fuzzify_training_data(training_file, num_inputs)
    random.shuffle(training_data)  # Shuffle the training data to keep results useful
    num_samples = 750  # Number of samples from the training data to use (reduces amount of computation)
    init_population_size = 50  # Number of parents to start out with
    selection_size = 50  # Number of survivors between the parents and the offspring (mu + lambda)
    generations = 15  # Iterations

    min_weight = -1  # Min value for weights and biases
    max_weight = 1  # Max value for weights and biases
    min_hidden = 5  # Min number of hidden nodes in a layer
    max_hidden = 10  # Max number of hidden nodes in a layer
    min_layer = 1  # Min number of layers
    max_layer = 3  # Max number of layers
    mut_chance = 50  # Mutation chance for each individual weight

    ec = EvoComp(init_population_size=init_population_size, selection_size=selection_size, generations=generations,
                 training_data=training_data, num_samples=num_samples, num_inputs=num_inputs, num_outputs=num_outputs,
                 min_weight=min_weight, max_weight=max_weight, min_hidden=min_hidden, max_hidden=max_hidden,
                 min_layer=min_layer, max_layer=max_layer, input_type='raw_input', output_type='raw_output',
                 type_init='select', mut_chance=mut_chance, mutate_data=False)

    ec.evolutionary_computation()

    print('\n\n')
    preference = []
    print('Please take the following survey about your preferences in wine.')

    # (padding, question) pairs: each padding zero-fills the inputs the survey does not ask about before the next
    # answer is appended, keeping the vector aligned with the network's 22 inputs
    questions = [(2, 'Do you like earthy flavors? '),
                 (5, 'Do you like smoky flavors? '),
                 (1, 'Do you like spicy flavors? '),
                 (0, 'Do you like nutty flavors? '),
                 (3, 'Do you like floral flavors? '),
                 (0, 'Do you like berry flavors? '),
                 (1, 'Do you like tropical flavors? '),
                 (2, 'Do you like sour flavors? ')]
    for padding, question in questions:
        preference += [0] * padding
        answer = input(question)
        preference.append(1.0 if 'y' in answer.lower() else 0.0)

    network_input = {'raw_input': preference}
    outputs = ec.best_nn.forward_computation(network_input)
    output_indexes = sorted(range(len(outputs[-1])), key=lambda i: outputs[-1][i], reverse=True)[:3]

    master_outputs = ["pinot noir", "merlot", "sangiovese", "tempranillo", "cabernet sauvignon", "syrah",
                      "pinot grigio", "riesling", "sauvignon blanc", "moscato", "gewürztraminer", "viognier",
                      "chardonnay"]

    print('\nYou might like:')
    for j in output_indexes:
        print(master_outputs[j])
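The output_indexes line above is a top-k argsort: it orders the output indexes by their activation and keeps the three strongest. A standalone illustration with dummy scores:

scores = [0.1, 0.9, 0.4, 0.7]  # dummy output activations
top3 = sorted(range(len(scores)), key=lambda i: scores[i], reverse=True)[:3]
print(top3)  # [1, 3, 2] -> indexes of the highest scores, best first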
Example #6
    def one_point_crossover(parent1, parent2):
        """
        This method performs a structural one-point crossover on two parents and returns two offspring. First, it
        randomly assigns each offspring one of the two parents' activation functions, input types, and output types.
        Next, it chooses the layer at which each parent will be split. Finally, the layer counts, weights, and biases
        are swapped accordingly and assigned to the children. The parents are left unchanged during this process to
        enable mu + lambda selection.
        :type parent1: NeuralNetwork
        :type parent2: NeuralNetwork
        :param parent1: Parent creating an offspring
        :param parent2: Parent creating an offspring
        :return: The two resulting children of the switched halves
        """
        def random_choice(choice1, choice2):
            if random.choice([True, False]):
                return choice1, choice2
            else:
                return choice2, choice1

        def cross_weights(weights1, weights2, trade1, trade2, layer_counts,
                          type_weight):
            # Switch the weights where they were cut off
            new_weights = weights1[:trade1] + weights2[trade2:]

            # Regenerate the weights or biases at the location where they were cut off to ensure they're the correct
            # size
            if type_weight == 'weight':
                new_weights[trade1 - 1] = ForwardNet.init_weights_random(
                    layer_counts[trade1], layer_counts[trade1 - 1])
            elif type_weight == 'bias':
                new_weights[trade1 - 1] = ForwardNet.init_weights_random(
                    layer_counts[trade1], 1)

            return new_weights

        # Assign the activation functions, input types, and output types
        child1_function, child2_function = random_choice(
            parent1.function, parent2.function)
        child1_input, child2_input = random_choice(parent1.input_type,
                                                   parent2.input_type)
        child1_output, child2_output = random_choice(parent1.output_type,
                                                     parent2.output_type)

        # Randomly choose which hidden layer they will be cut off at
        num_trade1 = random.randint(1, parent1.num_layers - 2)
        num_trade2 = random.randint(1, parent2.num_layers - 2)
        child1_layer_counts = parent1.layer_counts[:num_trade1] + parent2.layer_counts[num_trade2:]
        child2_layer_counts = parent2.layer_counts[:num_trade2] + parent1.layer_counts[num_trade1:]

        # Swap the weights to follow the swapped layers and regenerate the matrices at the splice point so their
        # shapes match the new layer counts
        child1_weights = cross_weights(parent1.current_weights,
                                       parent2.current_weights, num_trade1,
                                       num_trade2, child1_layer_counts,
                                       'weight')
        child2_weights = cross_weights(parent2.current_weights,
                                       parent1.current_weights, num_trade2,
                                       num_trade1, child2_layer_counts,
                                       'weight')
        child1_biases = cross_weights(parent1.current_biases,
                                      parent2.current_biases, num_trade1,
                                      num_trade2, child1_layer_counts, 'bias')
        child2_biases = cross_weights(parent2.current_biases,
                                      parent1.current_biases, num_trade2,
                                      num_trade1, child2_layer_counts, 'bias')

        # The miracle of life
        child1 = ForwardNet(training_data=parent1.training_data,
                            weights=child1_weights,
                            biases=child1_biases,
                            weight_init='select',
                            layer_counts=child1_layer_counts,
                            input_type=child1_input,
                            output_type=child1_output,
                            type_init='select',
                            function=child1_function,
                            mutate_data=parent1.mutate_data)

        child2 = ForwardNet(training_data=parent2.training_data,
                            weights=child2_weights,
                            biases=child2_biases,
                            weight_init='select',
                            layer_counts=child2_layer_counts,
                            input_type=child2_input,
                            output_type=child2_output,
                            type_init='select',
                            function=child2_function,
                            mutate_data=parent2.mutate_data)

        return [child1, child2]
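A toy trace of the layer-count splice with made-up sizes shows why the seam matrices must be regenerated: each child keeps its input and output counts, but the layers on either side of the cut generally change size, so the weight matrix bridging them needs new dimensions.

p1 = [3, 8, 6, 2]            # inputs, two hidden layers, outputs
p2 = [3, 5, 9, 7, 2]
t1, t2 = 2, 1                # cut points drawn from 1 .. num_layers - 2
child1 = p1[:t1] + p2[t2:]   # [3, 8, 5, 9, 7, 2]
child2 = p2[:t2] + p1[t1:]   # [3, 6, 2]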