Example #1
    def test_compute_classification(self):
        network = RbfNetwork(2, 1, 2)

        network.copy_memory([
            2.0,  # input 1 to RBF 1
            2.0,  # input 2 to RBF 1
            5.0,  # RBF width
            2.0,  # RBF, center-0
            4.0,  # RBF, center-1
            3.0,  # RBF1 to Output 1
            4.0,  # Bias to Output 1
            5.0,  # RBF1 to Output 2
            6.0   # Bias to Output 2
        ])

        x = [1, 2]

        y = network.compute_regression(x)

        # Inputs: (2*1) + (2*2) = 6
        # RBF: Gaussian(6) = 1
        # Outputs: (1*3) + (1*4) = 7
        self.assertAlmostEqual(7, y[0], 3)

        # Inputs: (2*1) + (2*2) = 6
        # RBF: Gaussian(6) = 1
        # Outputs: (1*5) + (1*6) = 11
        self.assertAlmostEqual(11, y[1], 3)

        cls = network.compute_classification(x)

        # class 1 is higher than class 0
        self.assertEqual(1, cls)
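The final assertion holds because output 1 (11) is larger than output 0 (7), which suggests classification is simply an argmax over the regression outputs. A minimal sketch of that idea, assuming compute_regression returns a plain Python list (this is an inference from the test, not the library's confirmed implementation):

def compute_classification_sketch(network, x):
    # Hypothetical: classify by the index of the strongest output neuron.
    outputs = network.compute_regression(x)
    return outputs.index(max(outputs))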
Example #2
    def test_compute_regression(self):
        network = RbfNetwork(2, 1, 1)

        network.copy_memory([
            2.0,  # input 1 to RBF 1
            2.0,  # input 2 to RBF 1
            5.0,  # RBF width
            2.0,  # RBF, center-0
            4.0,  # RBF, center-1
            3.0,  # RBF1 to Output 1
            4.0   # Bias to Output 1
        ])

        x = [1, 2]

        y = network.compute_regression(x)[0]

        # Inputs: (2*1) + (2*2) = 6
        # RBF: Gaussian(6) = 1
        # Outputs: (1*3) + (1*4) = 7
        self.assertAlmostEqual(7, y, 3)
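Note that the list handed to copy_memory has exactly as many values as the network shape implies: RbfNetwork(2, 1, 2) in the first test takes 9 values, and RbfNetwork(2, 1, 1) here takes 7. A small helper that reproduces those counts, assuming the layout suggested by the comments (this layout is inferred, not the library's documented formula):

def memory_length(input_count, rbf_count, output_count):
    input_weights = input_count * rbf_count          # input-to-RBF weights
    rbf_params = rbf_count * (1 + input_count)       # one width plus one center per input, per RBF
    output_params = (rbf_count + 1) * output_count   # one weight per RBF plus a bias, per output
    return input_weights + rbf_params + output_params

memory_length(2, 1, 2)  # 9, matching the first test
memory_length(2, 1, 1)  # 7, matching this test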
Example #3
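This snippet begins mid-program: it assumes an RbfNetwork and the XOR training data already exist, along with numpy. A minimal setup sketch (the choice of 5 RBF neurons is an arbitrary assumption, and the import lines for RbfNetwork, TrainHillClimb, and ErrorCalculation are omitted because their module paths are not shown in the original):

import numpy as np

# XOR truth table: inputs and the ideal outputs the network should learn.
training_input = [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
training_ideal = [[0.0], [1.0], [1.0], [0.0]]

# Two inputs, one output; the RBF count of 5 is hypothetical.
network = RbfNetwork(2, 5, 1)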
def score_funct(x):
    """
    The score function.  Calculate the MSE error between the actual network output and the ideal values for the XOR.
    @param x: The long term memory that we are to score.
    @return: The MSE error.
    """
    global input_data
    global output_data
    network.copy_memory(x)
    actual_output = []
    for input_data in training_input:
        output_data = network.compute_regression(input_data)
        actual_output.append(output_data)
    return ErrorCalculation.mse(np.array(actual_output), training_ideal)
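
ErrorCalculation.mse above is assumed to be the standard mean squared error. For reference, a minimal NumPy sketch of that metric (a stand-in, not the library's actual implementation):

def mse_sketch(actual, ideal):
    # Mean of the squared differences between actual and ideal outputs.
    return np.mean((np.array(actual) - np.array(ideal)) ** 2)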

# Create a copy of the long-term memory.  This becomes the initial state.
x0 = list(network.long_term_memory)

# Train with hill climbing.
train = TrainHillClimb()
train.display_iteration = True
train.max_iterations = 100
train.stop_score = 0.05
result = train.train(x0, score_funct)

# Copy our final state to the long term memory of the network.
network.copy_memory(result)

# Display the output for the XOR.  XOR will not be trained perfectly.  You should see that the (0,1) and (1,0) inputs
# are both close to 1.0, whereas the (1,1) and (0,0) are close to 0.0.
for input_data in training_input:
    output_data = network.compute_regression(input_data)
    print(str(input_data) + " -> " + str(output_data))
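
TrainHillClimb is used above as a black-box optimizer over the network's long-term memory. The core idea of hill climbing is to repeatedly perturb the current state and keep only the changes that improve the score; the sketch below illustrates that idea under assumed details (random single-dimension moves, a fixed step size), which need not match the library's actual algorithm:

import random

def hill_climb_sketch(x0, score_funct, max_iterations=100, step=0.1, stop_score=0.05):
    best = list(x0)
    best_score = score_funct(best)
    for _ in range(max_iterations):
        if best_score <= stop_score:
            break  # good enough; mirrors train.stop_score above
        # Nudge one randomly chosen memory value up or down.
        candidate = list(best)
        i = random.randrange(len(candidate))
        candidate[i] += random.choice([-step, step])
        candidate_score = score_funct(candidate)
        if candidate_score < best_score:
            best, best_score = candidate, candidate_score  # keep the improvement
    return best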