Example #1
    def test_compute_classification(self):
        network = RbfNetwork(2, 1, 2)

        network.copy_memory([
            2.0, # input 1 to RBF 1
            2.0, # input 2 to RBF 1
            5.0, # RBF width
            2.0, # RBF, center-0
            4.0, # RBF, center-1
            3.0, # RBF1 to Output 1
            4.0, # Bias to Output 1
            5.0, # RBF1 to Output 2
            6.0])  # Bias to Output 2

        x = [1, 2]

        y = network.compute_regression(x)

        # Inputs: (2*1) + (2*2) = 6
        # RBF: Gaussian(6) = 1
        # Outputs: (1*3) + (1*4) = 7
        self.assertAlmostEqual(7, y[0], 3)

        # Inputs: (2*1) + (2*2) = 6
        # RBF: Gaussian(6) = 1
        # Outputs: (1*5) + (1*6) = 11
        self.assertAlmostEqual(11, y[1], 3)

        cls = network.compute_classification(x)

        # class 1 is higher than class 0
        self.assertEqual(1, cls)
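
A note on the arithmetic in the comments above: with this memory layout the weighted inputs (2*1 = 2 and 2*2 = 4) land exactly on the RBF centers (2, 4), so the distance is zero and the Gaussian evaluates to 1 regardless of the width; each output then reduces to weight + bias. The by-hand check below is a minimal standalone sketch of that calculation, assuming the AIFH-style memory layout, not a call into the RbfNetwork API.

import math

x = [1, 2]
input_weights = [2.0, 2.0]        # input 1/2 to RBF 1
width, centers = 5.0, [2.0, 4.0]  # RBF width and centers

# The weighted inputs land exactly on the centers, so the squared
# distance is 0 and the Gaussian evaluates to 1 whatever the exact
# Gaussian normalization is.
weighted = [w * xi for w, xi in zip(input_weights, x)]
dist_sq = sum((wi - c) ** 2 for wi, c in zip(weighted, centers))
rbf = math.exp(-dist_sq / (2.0 * width ** 2))  # exp(0) == 1.0

print(rbf * 3.0 + 4.0)  # 7.0  -> output 1
print(rbf * 5.0 + 6.0)  # 11.0 -> output 2

Since 11 > 7, a classification taken as the index of the larger output picks class 1, which is what the final assertion checks.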
Example #2
    def test_reset_compute(self):
        network = RbfNetwork(2, 1, 1)
        total = 0

        for i in range(len(network.long_term_memory)):
            total += network.long_term_memory[i]

        self.assertEqual(0, total)

        network.reset()

        total = 0
        for i in range(len(network.long_term_memory)):
            total += network.long_term_memory[i]

        self.assertTrue(total > 1)
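
reset() fills the long-term memory with random values, so the total is expected to move away from zero. Note that total > 1 is a loose sanity check: a random vector could in principle sum to a value below 1, or to a negative one. A slightly stricter variant, sketched below on the assumption that reset() randomizes long_term_memory in place:

from rbf_network import RbfNetwork

network = RbfNetwork(2, 1, 1)
network.reset()
# At least one value should now be non-zero, regardless of sign.
assert any(v != 0 for v in network.long_term_memory)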
Example #3
    def test_compute_regression(self):
        network = RbfNetwork(2, 1, 1)

        network.copy_memory([
            2.0, # input 1 to RBF 1
            2.0, # input 2 to RBF 1
            5.0, # RBF width
            2.0, # RBF, center-0
            4.0, # RBF, center-1
            3.0, # RBF1 to Output 1
            4.0   # Bias to Output 1
        ])

        x = [1, 2]

        y = network.compute_regression(x)[0]

        # Inputs: (2*1) + (2*2) = 6
        # RBF: Gaussian(6) = 1
        # Outputs: (1*3) + (1*4) = 7
        self.assertAlmostEqual(7, y, 3)
Example #4
inv_classes = {v: k for k, v in classes.items()}

# Normalize iris species using one-of-n.
# We could have used equilateral as well.  For an example of equilateral, see the example_nm_iris example.
norm.norm_col_one_of_n(iris_work, 4, classes, 0, 1)

# Prepare training data.  Separate into input and ideal.
training = np.array(iris_work)
training_input = training[:, 0:4]
training_ideal = training[:, 4:7]

# Create an RBF network.  There are four inputs and three outputs.
# There are also four RBF functions used internally.
# You can experiment with different numbers of internal RBF functions.
# However, the input and output counts must match the data set.
network = RbfNetwork(4, 4, 3)
network.reset()


def score_funct(x):
    """
    The score function for Iris anneal.
    @param x:
    @return:
    """
    global best_score
    global input_data
    global output_data
    # Update the network's long term memory to the vector we need to score.
    network.copy_memory(x)
    # Loop over the training set and calculate the output for each.
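
The snippet above cuts off inside score_funct. Following the pattern of the XOR examples below, and assuming the ErrorCalculation helper imported there, a hedged sketch of how such a score function typically finishes (score_funct_sketch is a hypothetical name):

def score_funct_sketch(x):
    # Hypothetical completion: copy the candidate memory into the
    # network, run every training row through it, and return the MSE
    # against the ideal outputs.
    network.copy_memory(x)
    actual_output = []
    for input_row in training_input:
        actual_output.append(network.compute_regression(input_row))
    return ErrorCalculation.mse(np.array(actual_output), training_ideal)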
Example #5
# Normalize iris species using one-of-n.
# We could have used equilateral as well.  For an example of equilateral, see the example_nm_iris example.
norm.norm_col_one_of_n(iris_work, 4, classes, 0, 1)


# Prepare training data.  Separate into input and ideal.
training = np.array(iris_work)
training_input = training[:, 0:4]
training_ideal = training[:, 4:7]

# Create an RBF network.  There are four inputs and three outputs.
# There are also four RBF functions used internally.
# You can experiment with different numbers of internal RBF functions.
# However, the input and output counts must match the data set.
network = RbfNetwork(4, 4, 3)
network.reset()

def score_funct(x):
    """
    The score function for Iris anneal.
    @param x:
    @return:
    """
    global best_score
    global input_data
    global output_data
    # Update the network's long term memory to the vector we need to score.
    network.copy_memory(x)
    # Loop over the training set and calculate the output for each.
    actual_output = []
Example #6
# The input for the XOR operator.
training_input = np.array([
    [0.0, 0.0],
    [1.0, 0.0],
    [0.0, 1.0],
    [1.0, 1.0]])

# The ideal output for the XOR operator.  Each row corresponds to a row in the training_input.
training_ideal = np.array([
    [0.0],
    [1.0],
    [1.0],
    [0.0],
])

network = RbfNetwork(2, 5, 1)
network.reset()


def score_funct(x):
    """
    The score function.  Calculate the MSE between the actual network output and the ideal values for the XOR operator.
    @param x: The long term memory that we are to score.
    @return: The MSE.
    """
    global input_data
    global output_data
    network.copy_memory(x)
    actual_output = []
    for input_data in training_input:
        output_data = network.compute_regression(input_data)
Example #7
import numpy as np

from rbf_network import RbfNetwork
from error import ErrorCalculation
from train import TrainHillClimb

# The input for the XOR operator.
training_input = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])

# The ideal output for the XOR operator.  Each row corresponds to a row in the training_input.
training_ideal = np.array([
    [0.0],
    [1.0],
    [1.0],
    [0.0],
])

network = RbfNetwork(2, 5, 1)
network.reset()


def score_funct(x):
    """
    The score function.  Calculate the MSE between the actual network output and the ideal values for the XOR operator.
    @param x: The long term memory that we are to score.
    @return: The MSE.
    """
    global input_data
    global output_data
    network.copy_memory(x)
    actual_output = []
    for input_data in training_input:
        output_data = network.compute_regression(input_data)
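
For reference, the MSE these score functions report can also be written directly in numpy; a minimal sketch of the calculation, independent of the imported error module:

import numpy as np

def mse(actual, ideal):
    # Mean of the squared differences between actual and ideal outputs.
    return np.mean((np.asarray(actual) - np.asarray(ideal)) ** 2)

After the loop above completes, the score function would return this value for actual_output against training_ideal.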
Example #8
    def test_basics(self):
        network = RbfNetwork(2, 1, 1)

        # Expect 7 values: (2 inputs * 1 RBF) + (1 RBF + 1 bias) * 1 output
        # + 3 RBF params (width + 2 centers) = 2 + 2 + 3 = 7
        self.assertEqual(7, len(network.long_term_memory))
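
The count generalizes. A hedged sketch of the formula, assuming the layout used throughout these examples (input-to-RBF weights, then one width plus one center per input for each RBF, then (RBF + bias) * output weights; expected_memory_len is a hypothetical helper, not part of RbfNetwork):

def expected_memory_len(input_count, rbf_count, output_count):
    input_weights = input_count * rbf_count           # 2 * 1 = 2
    rbf_params = rbf_count * (1 + input_count)        # 1 * (1 + 2) = 3
    output_weights = (rbf_count + 1) * output_count   # (1 + 1) * 1 = 2
    return input_weights + rbf_params + output_weights

assert expected_memory_len(2, 1, 1) == 7  # this test
assert expected_memory_len(2, 1, 2) == 9  # the 9-element vector in Example #1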