示例#1
0
    def test_compute_classification(self):
        """Verify regression output and the derived class for a 2-1-2 RBF net."""
        net = RbfNetwork(2, 1, 2)

        # Long-term memory layout: input weights, RBF width + centers,
        # then output-layer weights (including bias terms).
        net.copy_memory([
            2.0,  # input 1 -> RBF 1
            2.0,  # input 2 -> RBF 1
            5.0,  # RBF width
            2.0,  # RBF center, dimension 0
            4.0,  # RBF center, dimension 1
            3.0,  # RBF 1 -> output 1
            4.0,  # bias -> output 1
            5.0,  # RBF 1 -> output 2
            6.0,  # bias -> output 2
        ])

        sample = [1, 2]
        outputs = net.compute_regression(sample)

        # Weighted input sum: (2*1) + (2*2) = 6; Gaussian(6) ~= 1.
        # Output 1: (1*3) + (1*4) = 7.
        self.assertAlmostEqual(7, outputs[0], 3)

        # Same activation feeds output 2: (1*5) + (1*6) = 11.
        self.assertAlmostEqual(11, outputs[1], 3)

        # NOTE(review): "compure" appears to be a typo carried by the
        # RbfNetwork API itself — confirm against the class before renaming.
        predicted = net.compure_classification(sample)

        # Output neuron 1 (11) exceeds neuron 0 (7), so class 1 is chosen.
        self.assertEqual(1, predicted)
示例#2
0
    def test_compute_classification(self):
        """Check a 2-input / 1-RBF / 2-output network's regression values
        and that classification picks the larger output neuron."""
        net = RbfNetwork(2, 1, 2)

        # Load fixed weights: input layer, RBF width/centers, output layer.
        weights = [
            2.0,  # input 1 -> RBF 1
            2.0,  # input 2 -> RBF 1
            5.0,  # RBF width
            2.0,  # RBF center, dimension 0
            4.0,  # RBF center, dimension 1
            3.0,  # RBF 1 -> output 1
            4.0,  # bias -> output 1
            5.0,  # RBF 1 -> output 2
            6.0,  # bias -> output 2
        ]
        net.copy_memory(weights)

        input_vec = [1, 2]
        result = net.compute_regression(input_vec)

        # Input sum (2*1)+(2*2)=6, Gaussian(6)~=1, so output 1 = 1*3+1*4 = 7.
        self.assertAlmostEqual(7, result[0], 3)
        # Output 2 = 1*5 + 1*6 = 11.
        self.assertAlmostEqual(11, result[1], 3)

        # NOTE(review): "compure" looks like a typo inherited from the
        # RbfNetwork class itself — verify before correcting the name.
        chosen = net.compure_classification(input_vec)

        # Class 1's output (11) is higher than class 0's (7).
        self.assertEqual(1, chosen)
示例#3
0
    def test_compute_regression(self):
        """Verify the single regression output of a 2-1-1 RBF network."""
        net = RbfNetwork(2, 1, 1)

        # Fixed long-term memory: input weights, RBF params, output weights.
        net.copy_memory([
            2.0,  # input 1 -> RBF 1
            2.0,  # input 2 -> RBF 1
            5.0,  # RBF width
            2.0,  # RBF center, dimension 0
            4.0,  # RBF center, dimension 1
            3.0,  # RBF 1 -> output 1
            4.0,  # bias -> output 1
        ])

        sample = [1, 2]
        result = net.compute_regression(sample)[0]

        # Input sum: (2*1) + (2*2) = 6; Gaussian(6) ~= 1;
        # output: (1*3) + (1*4) = 7.
        self.assertAlmostEqual(7, result, 3)
示例#4
0
    def test_compute_regression(self):
        """A 2-input, 1-RBF, 1-output network with fixed weights yields 7."""
        net = RbfNetwork(2, 1, 1)

        weights = [
            2.0,  # input 1 -> RBF 1
            2.0,  # input 2 -> RBF 1
            5.0,  # RBF width
            2.0,  # RBF center, dimension 0
            4.0,  # RBF center, dimension 1
            3.0,  # RBF 1 -> output 1
            4.0,  # bias -> output 1
        ]
        net.copy_memory(weights)

        output = net.compute_regression([1, 2])[0]

        # (2*1)+(2*2)=6 at the RBF input; Gaussian(6)~=1;
        # final output (1*3)+(1*4) = 7.
        self.assertAlmostEqual(7, output, 3)
示例#5
0
    # NOTE(review): this is the tail of a scoring function whose `def` line is
    # above this excerpt — presumably `score_funct(x)` used by the annealing
    # trainer below; confirm the signature before editing.
    # Update the network's long term memory to the vector we need to score.
    network.copy_memory(x)
    # Loop over the training set and calculate the output for each.
    actual_output = []
    for input_data in training_input:
        output_data = network.compute_regression(input_data)
        actual_output.append(output_data)
    # Calculate the error with MSE.
    # Lower MSE means a better candidate memory vector `x`.
    result = ErrorCalculation.mse(np.array(actual_output), training_ideal)
    return result


# Create a copy of the long-term memory.  This becomes the initial state.
# (`network` is constructed earlier in the script, outside this excerpt.)
x0 = list(network.long_term_memory)

# Perform the annealing, minimizing the MSE reported by score_funct.
train = TrainAnneal()
train.display_iteration = True
train.train(x0, score_funct)

# Display the final validation.  We show all of the iris data as well as the predicted species.
for i in range(0, len(training_input)):
    input_data = training_input[i]
    # Compute the output from the RBF network
    output_data = network.compute_regression(input_data)
    ideal_data = training_ideal[i]
    # Decode the three output neurons into a class number.
    class_id = norm.denorm_one_of_n(output_data)
    # Print the raw input alongside the predicted and ideal species names.
    print(
        str(input_data) + " -> " + inv_classes[class_id] + ", Ideal: " +
        ideal_species[i])
示例#6
0
    # NOTE(review): fragment — the enclosing function's `def` line (likely
    # `score_funct(x)`, used by `train.train` below) is above this excerpt.
    # Update the network's long term memory to the vector we need to score.
    network.copy_memory(x)
    # Loop over the training set and calculate the output for each.
    actual_output = []
    for input_data in training_input:
        output_data = network.compute_regression(input_data)
        actual_output.append(output_data)
    # Calculate the error with MSE.
    # The annealing trainer minimizes this value.
    result = ErrorCalculation.mse(np.array(actual_output), training_ideal)
    return result

# Create a copy of the long-term memory.  This becomes the initial state.
# (`network` is built earlier in the full script, outside this excerpt.)
x0 = list(network.long_term_memory)

# Perform the annealing, using score_funct (defined above) as the objective.
train = TrainAnneal()
train.display_iteration = True
train.train(x0, score_funct)

# Display the final validation.  We show all of the iris data as well as the predicted species.
for i in range(0, len(training_input)):
    input_data = training_input[i]
    # Compute the output from the RBF network
    output_data = network.compute_regression(input_data)
    ideal_data = training_ideal[i]
    # Decode the three output neurons into a class number.
    class_id = norm.denorm_one_of_n(output_data)
    # Show each sample's input, predicted species, and ideal species.
    print(str(input_data) + " -> " + inv_classes[class_id] + ", Ideal: " + ideal_species[i])