예제 #1
0
class NARXRecurrentTest(unittest.TestCase):
    """Unit tests for the NARXRecurrent configuration class."""

    def setUp(self):
        # Minimal 2-input / 1-hidden / 1-output network to attach to.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = NARXRecurrent(
            output_order=1,
            incoming_weight_from_output=.9,
            input_order=1,
            incoming_weight_from_input=.7)

    def test_class_init_(self):
        config = self.rec_config
        self.assertEqual(0, config.existing_weight)
        self.assertEqual(None, config._node_type)
        # [order, weight] pairs for the output and input recurrences.
        self.assertEqual([1, .9], config.output_values)
        self.assertEqual([1, .7], config.input_values)

    def test_get_source_nodes(self):
        # Output mode draws from the last layer's output nodes.
        self.rec_config._node_type = NODE_OUTPUT
        expected = self.net.layers[-1].get_nodes(NODE_OUTPUT)
        self.assertEqual(expected, self.rec_config.get_source_nodes(self.net))

        # Input mode draws from the first layer's input nodes.
        self.rec_config._node_type = NODE_INPUT
        expected = self.net.layers[0].get_nodes(NODE_INPUT)
        self.assertEqual(expected, self.rec_config.get_source_nodes(self.net))
예제 #2
0
def performNN(all_extracted_features, all_targets):
    """Train a small feed-forward net on the extracted features and report MSE.

    One hidden layer of two tanh nodes; learns on the first 80% of the
    samples and tests on the remaining 20%.

    Returns the mean squared error measured over the test range.
    """
    from pyneurgen.neuralnet import NeuralNet

    net = NeuralNet()
    net.init_layers(len(all_extracted_features[0]), [2], 1)

    net.randomize_network()
    net.set_halt_on_extremes(True)

    #   Beginning weights could be constrained (e.g. net.set_random_constraint(.5))
    net.set_learnrate(.001)

    net.set_all_inputs(all_extracted_features)
    net.set_all_targets(all_targets)

    # 80/20 learn/test split.
    length = len(all_extracted_features)
    learn_end_point = int(length * .8)
    net.set_learn_range(0, learn_end_point)
    net.set_test_range(learn_end_point + 1, length - 1)

    net.layers[1].set_activation_type('tanh')
    net.learn(epochs=150, show_epoch_results=True, random_testing=True)

    # Fix: Python 2 `print mse` statement -> print() function; also return
    # the value so callers can use it (previously it was discarded).
    mse = net.test()
    print(mse)
    return mse
예제 #3
0
def buildIrisNetwork(all_inputs, all_targets):
    """Construct a 4-6-3 network configured for the Iris data set."""
    net = NeuralNet()
    net.init_layers(4, [6], 3)

    net.randomize_network()
    net.set_halt_on_extremes(True)

    # Beginning weights could be constrained via net.set_random_constraint(.5).
    net.set_learnrate(.1)

    net.set_all_inputs(all_inputs)
    net.set_all_targets(all_targets)

    # First half of the data is the learn range; the remainder is the test range.
    sample_count = len(all_inputs)
    split = int(sample_count * .5)
    net.set_learn_range(0, split)
    net.set_test_range(split + 1, sample_count - 1)

    # tanh on input and hidden layers, threshold on the output layer.
    for layer_no in (0, 1):
        net.layers[layer_no].set_activation_type('tanh')
    net.layers[2].set_activation_type('threshold')
    return net
예제 #4
0
class ElmanSimpleRecurrentTest(unittest.TestCase):
    """Unit tests for the ElmanSimpleRecurrent configuration class."""

    def setUp(self):
        # Small fixture network: 2 inputs, one hidden node, one output.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = ElmanSimpleRecurrent()

    def test_class_init_(self):
        config = self.rec_config
        self.assertEqual('a', config.source_type)
        self.assertEqual(1.0, config.incoming_weight)
        self.assertEqual(0.0, config.existing_weight)
        self.assertEqual('m', config.connection_type)
        self.assertEqual(1, config.copy_levels)
        self.assertEqual(0, config.copy_nodes_layer)

    def test_get_source_nodes(self):
        expected = self.net.layers[1].get_nodes(NODE_HIDDEN)
        actual = self.rec_config.get_source_nodes(self.net)

        # Same node count and the same node list.
        self.assertEqual(len(expected), len(actual))
        self.assertEqual(self.net.layers[1].get_nodes(NODE_HIDDEN),
                         self.rec_config.get_source_nodes(self.net))
예제 #5
0
def buildIrisNetwork(all_inputs, all_targets):
    """Return a 4-6-3 NeuralNet wired up for the Iris classification data."""
    net = NeuralNet()
    net.init_layers(4, [6], 3)

    net.randomize_network()
    net.set_halt_on_extremes(True)

    # Initial weights could be constrained to +/- .5 with set_random_constraint.
    net.set_learnrate(.1)

    net.set_all_inputs(all_inputs)
    net.set_all_targets(all_targets)

    # Learn on the first half, test on the rest.
    total = len(all_inputs)
    boundary = int(total * .5)
    net.set_learn_range(0, boundary)
    net.set_test_range(boundary + 1, total - 1)

    net.layers[0].set_activation_type('tanh')
    net.layers[1].set_activation_type('tanh')
    net.layers[2].set_activation_type('threshold')
    return net
class JordanRecurrentTest(unittest.TestCase):
    """Unit tests for the JordanRecurrent configuration class."""

    def setUp(self):
        # Fixture: 2 inputs, a single hidden node, one output.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = JordanRecurrent(existing_weight=.8)

    def test_class_init_(self):
        config = self.rec_config
        self.assertEqual('a', config.source_type)
        self.assertEqual(1.0, config.incoming_weight)
        self.assertEqual(0.8, config.existing_weight)
        self.assertEqual('m', config.connection_type)
        self.assertEqual(1, config.copy_levels)
        self.assertEqual(0, config.copy_nodes_layer)

    def test_get_source_nodes(self):
        # Jordan recurrence copies from the output layer (index 2 here).
        output_nodes = self.net.layers[2].nodes
        self.assertEqual(output_nodes,
                         self.rec_config.get_source_nodes(self.net))
class ElmanSimpleRecurrentTest(unittest.TestCase):
    """Verifies default construction and node lookup of ElmanSimpleRecurrent."""

    def setUp(self):
        # Minimal network: 2 inputs, one hidden node, one output.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = ElmanSimpleRecurrent()

    def test_class_init_(self):
        # Expected constructor defaults, checked attribute by attribute.
        expected_defaults = [
            ('source_type', 'a'),
            ('incoming_weight', 1.0),
            ('existing_weight', 0.0),
            ('connection_type', 'm'),
            ('copy_levels', 1),
            ('copy_nodes_layer', 0),
        ]
        for attr, expected in expected_defaults:
            self.assertEqual(expected, getattr(self.rec_config, attr))

    def test_get_source_nodes(self):
        hidden = self.net.layers[1].get_nodes(NODE_HIDDEN)
        source = self.rec_config.get_source_nodes(self.net)

        # Same length and the same nodes.
        self.assertEqual(len(hidden), len(source))
        self.assertEqual(
            self.net.layers[1].get_nodes(NODE_HIDDEN),
            self.rec_config.get_source_nodes(self.net))
class NARXRecurrentTest(unittest.TestCase):
    """Verifies NARXRecurrent defaults and source-node selection."""

    def setUp(self):
        # Minimal network: two inputs, one hidden node, one output.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = NARXRecurrent(
            output_order=1, incoming_weight_from_output=.9,
            input_order=1, incoming_weight_from_input=.7)

    def test_class_init_(self):
        self.assertEqual(0, self.rec_config.existing_weight)
        self.assertEqual(None, self.rec_config._node_type)
        # [order, weight] for output recurrence, then input recurrence.
        self.assertEqual([1, .9], self.rec_config.output_values)
        self.assertEqual([1, .7], self.rec_config.input_values)

    def test_get_source_nodes(self):
        # Output mode reads the last layer; input mode reads the first.
        cases = [
            (NODE_OUTPUT, self.net.layers[-1]),
            (NODE_INPUT, self.net.layers[0]),
        ]
        for node_type, layer in cases:
            self.rec_config._node_type = node_type
            self.assertEqual(layer.get_nodes(node_type),
                             self.rec_config.get_source_nodes(self.net))
예제 #9
0
class JordanRecurrentTest(unittest.TestCase):
    """Verifies JordanRecurrent defaults and source-node selection."""

    def setUp(self):
        # Two inputs, a single hidden node, and one output node.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = JordanRecurrent(existing_weight=.8)

    def test_class_init_(self):
        # Constructor defaults plus the explicitly passed existing_weight.
        for attr, expected in (('source_type', 'a'),
                               ('incoming_weight', 1.0),
                               ('existing_weight', 0.8),
                               ('connection_type', 'm'),
                               ('copy_levels', 1),
                               ('copy_nodes_layer', 0)):
            self.assertEqual(expected, getattr(self.rec_config, attr))

    def test_get_source_nodes(self):
        # The output layer's nodes feed the Jordan copy nodes.
        self.assertEqual(self.net.layers[2].nodes,
                         self.rec_config.get_source_nodes(self.net))
class RecurrentConfigTest(unittest.TestCase):
    """Unit tests for the RecurrentConfig base class."""

    def setUp(self):
        # Small 2-input / 1-hidden / 1-output fixture network.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)

        self.rec_config = RecurrentConfig()

    def test_apply_config(self):
        # apply_config must reject anything that is not a NeuralNet.
        self.assertRaises(
                ValueError,
                self.rec_config.apply_config, 'not neural net')

    def test__apply_config(self):
        # Fix: Python 2 print statement replaced with the print() function
        # so this module also runs under Python 3.
        print('test__apply_config not yet implemented')

    def test_fully_connect(self):
        node = Node()
        unode1 = Node()
        unode2 = Node()

        self.rec_config._fully_connect(node, [unode1, unode2])

        # Each upper node receives exactly one connection coming from `node`.
        conn = unode1.input_connections[0]
        self.assertEqual(node, conn.lower_node)
        self.assertEqual(unode1, conn.upper_node)
        conn = unode2.input_connections[0]
        self.assertEqual(node, conn.lower_node)
        self.assertEqual(unode2, conn.upper_node)

    def test_get_source_nodes(self):
        # The base class returns the whole net; subclasses narrow this down.
        self.assertEqual(True, isinstance(
                                    self.rec_config.get_source_nodes(self.net),
                                    NeuralNet))

    def test_get_upper_nodes(self):

        self.assertEqual(1, len(self.rec_config.get_upper_nodes(self.net)))
예제 #11
0
class RecurrentConfigTest(unittest.TestCase):
    """Verifies the behaviour of the RecurrentConfig base class."""

    def setUp(self):
        # Fixture network: 2 inputs, one hidden node, one output.
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = RecurrentConfig()

    def test_apply_config(self):
        # Anything that is not a NeuralNet must be rejected.
        with self.assertRaises(ValueError):
            self.rec_config.apply_config('not neural net')

    def test__apply_config(self):
        print('test__apply_config not yet implemented')

    def test_fully_connect(self):
        source = Node()
        uppers = [Node(), Node()]

        self.rec_config._fully_connect(source, uppers)

        # Every upper node gets one incoming connection from `source`.
        for upper in uppers:
            conn = upper.input_connections[0]
            self.assertEqual(source, conn.lower_node)
            self.assertEqual(upper, conn.upper_node)

    def test_get_source_nodes(self):
        # The base implementation hands back the entire network object.
        source = self.rec_config.get_source_nodes(self.net)
        self.assertEqual(True, isinstance(source, NeuralNet))

    def test_get_upper_nodes(self):
        self.assertEqual(1, len(self.rec_config.get_upper_nodes(self.net)))
예제 #12
0
        # print(all_inputs[-1], all_targets[-1])
    return population, all_inputs, all_targets


# generate data
population, all_inputs, all_targets = generate_data()

# NARXRecurrent topology: 1 input node, 10 hidden, 1 output; the output
# recurrence looks back 3 steps at weight .6, the input recurrence 2 steps
# at weight .4.
input_nodes, hidden_nodes, output_nodes = 1, 10, 1
output_order, incoming_weight_from_output = 3, .6
input_order, incoming_weight_from_input = 2, .4

# init neural network
net = NeuralNet()
net.init_layers(
    input_nodes, [hidden_nodes], output_nodes,
    NARXRecurrent(output_order, incoming_weight_from_output, input_order,
                  incoming_weight_from_input))
net.randomize_network()
net.set_halt_on_extremes(True)

# constrain the initial weights to +/- .5 and set the learning rate
net.set_random_constraint(.5)
net.set_learnrate(.1)

# set inputs and outputs
net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)

# 80% learn split; NOTE(review): the set_learn_range/set_test_range calls
# presumably follow below, outside this view.
length = len(all_inputs)
learn_end_point = int(length * .8)
예제 #13
0
def serNeural(sDay,nAhead,x0,hWeek):
    """Fit a NARX recurrent net to the daily series and plot the results.

    sDay   -- time-indexed series with 'y' and 'hist' columns (presumably a
              pandas DataFrame -- confirm with callers)
    nAhead -- number of periods to predict ahead
    x0     -- parameter mapping; only 'obs_time' is read here
    hWeek  -- passed through to getHistory()
    """
    # NOTE(review): nLin and nFit are computed but not used below -- verify
    # whether they are vestigial.
    nLin = sDay.shape[0] + nAhead
    nFit = sDay.shape[0] if int(x0['obs_time']) <= 14 else int(x0['obs_time'])
    predS = getHistory(sDay,nAhead,x0,hWeek)
    # ISO week number for every timestamp in the index.
    weekS = [x.isocalendar()[1] for x in sDay.index]
    # Per-row features: [index, value, day-of-week, week-of-year].
    population = [[float(i),sDay['y'][i],float(i%7),weekS[i]] for i in range(sDay.shape[0])]
    all_inputs = []
    all_targets = []
    # Scale factors derived from the series; factorT and factorH are only
    # used by the commented-out input variant below.
    factorY = sDay['y'].mean()
    factorT = 1.0 / float(len(population))*factorY
    factorD = 1./7.*factorY
    factorW = 1./52.*factorY
    factorS = 4.*sDay['y'].std()
    factorH = factorY/sDay['hist'].mean()

    def population_gen(population):
        # Yields the rows in order; shuffling is deliberately disabled.
        pop_sort = [item for item in population]
#        random.shuffle(pop_sort)
        for item in pop_sort:
            yield item

    # Inputs: scaled day-of-week, random jitter around the mean, scaled week.
    for t,y,y1,y2 in population_gen(population):
        #all_inputs.append([t*factorT,(.5-random.random())*factorS+factorY,y1*factorD,y2*factorW])
        all_inputs.append([y1*factorD,(.5-random.random())*factorS+factorY,y2*factorW])
        all_targets.append([y])

    # Debug plotting of the raw inputs/targets (disabled).
    if False:
        plt.plot([x[0] for x in all_inputs],'-',label='targets0')
        plt.plot([x[1] for x in all_inputs],'-',label='targets1')
        plt.plot([x[2] for x in all_inputs],'-',label='targets2')
        # plt.plot([x[3] for x in all_inputs],'-',label='targets3')
        plt.plot([x[0] for x in all_targets],'-',label='actuals')
        plt.legend(loc='lower left', numpoints=1)
        plt.show()

    # NARX net: 3 inputs, 10 hidden, 1 output; output recurrence of order 3
    # at weight .6, input recurrence of order 2 at weight .4.
    net = NeuralNet()
    net.init_layers(3,[10],1,NARXRecurrent(3,.6,2,.4))
    net.randomize_network()
    net.set_random_constraint(.5)
    net.set_learnrate(.1)
    net.set_all_inputs(all_inputs)
    net.set_all_targets(all_targets)
    #predS['pred'] = [item[0][0] for item in net.test_targets_activations]
    # Learn on the first 80% of the samples, test on the remainder.
    length = len(all_inputs)
    learn_end_point = int(length * .8)
    # random.sample(all_inputs,10)
    net.set_learn_range(0, learn_end_point)
    net.set_test_range(learn_end_point + 1, length - 1)
    net.layers[1].set_activation_type('tanh')

    net.learn(epochs=125,show_epoch_results=True,random_testing=False)
    mse = net.test()
    #net.save(os.environ['LAV_DIR'] + "/out/train/net.txt")

    # Unpack (target, actual) pairs from the test run for plotting.
    test_positions = [item[0][0] for item in net.get_test_data()]
    all_targets1 = [item[0][0] for item in net.test_targets_activations]
    all_actuals = [item[1][0] for item in net.test_targets_activations]
    #   This is quick and dirty, but it will show the results
    plt.subplot(3, 1, 1)
    plt.plot([i for i in sDay['y']],'-')
    plt.title("Population")
    plt.grid(True)

    plt.subplot(3, 1, 2)
    plt.plot(test_positions, all_targets1, 'b-', label='targets')
    plt.plot(test_positions, all_actuals, 'r-', label='actuals')
    plt.grid(True)
    plt.legend(loc='lower left', numpoints=1)
    plt.title("Test Target Points vs Actual Points")

    plt.subplot(3, 1, 3)
    plt.plot(range(1, len(net.accum_mse) + 1, 1), net.accum_mse)
    plt.xlabel('epochs')
    plt.ylabel('mean squared error')
    plt.grid(True)
    plt.title("Mean Squared Error by Epoch")
    plt.show()
예제 #14
0
    """

    pop_sort = [item for item in population]
    random.shuffle(pop_sort)

    for item in pop_sort:
        yield item

#   Build the inputs from the random data generator.
#   NOTE(review): randomdata, all_inputs and all_targets are defined earlier
#   in this script, outside this fragment.
pat, testpat = randomdata()
for p in pat:
    all_inputs.append(p[0])
    all_targets.append(p[1])

net = NeuralNet()
net.init_layers(4, [6], 3)

# Fix: Python 2 print statements and raw_input converted to the Python 3
# print() function and input(); output is unchanged.
print(array(all_inputs).shape)
print(all_inputs)
print()
print()
print(array(all_targets).shape)
print(all_targets)

# Pause so the shapes can be inspected before training starts.
input('')

net.randomize_network()
net.set_halt_on_extremes(True)

#   Set to constrain beginning weights to -.5 to .5
#       Just to show we can
예제 #15
0
    sys.path.append('/home/david/Dropbox/programming/python/ann/mypybrain')

from pylab import array, ylim, where, average
from pylab import plot, legend, subplot, grid, xlabel, ylabel, show, title
from pyneurgen.neuralnet import NeuralNet
from pyneurgen.nodes import BiasNode, Connection
from pybrain.utilities import percentError

from iris import neurgenData
from src.utilities import percentError

#   Build the inputs from the Iris data set helper.
all_inputs, all_targets = neurgenData()

# 4 features in, one hidden layer of 6 nodes, 3 class outputs.
net = NeuralNet()
net.init_layers(4, [6], 3)

net.randomize_network()
net.set_halt_on_extremes(True)

#   Set to constrain beginning weights to -.5 to .5
#       Just to show we can
#net.set_random_constraint(.5)
net.set_learnrate(.1)

net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)

# 50/50 learn/test split; NOTE(review): the range-setting calls presumably
# follow below, outside this view.
length = len(all_inputs)
learn_end_point = int(length * .5)
예제 #16
0
                        xs[14], xs[15], xs[16], xs[17], xs[18], xs[19], xs[20]])
    ys = np.concatenate([ys[0],ys[1],ys[2],ys[3], ys[4], ys[5], ys[6],
                         ys[7], ys[8], ys[9], ys[10], ys[11], ys[12], ys[13],
                        ys[14], ys[15], ys[16], ys[17], ys[18], ys[19], ys[20]])

# create a NARX network for each (l1, l2) size pair and run training.
# NOTE(review): xs/ys are the concatenated arrays built above; assumes each
# element is usable as a hidden-layer node count -- confirm.
    for l1, l2 in zip (xs, ys):
        # Only output recurrence is used: look back 6 steps at full weight;
        # the input recurrence is disabled (order 0, weight 0).
        output_order = 6
        incoming_weight_from_output = 1.
        input_order = 0
        incoming_weight_from_input = 0.
            
        net = NeuralNet()
        net.init_layers(4, [l1,l2], 1, NARXRecurrent(
                    output_order,
                    incoming_weight_from_output,
                    input_order,
                    incoming_weight_from_input))
        
        net.randomize_network()
        net.set_halt_on_extremes(True)
        
        
        #   Set to constrain beginning weights to -.5 to .5
        #       Just to show we can
        
        net.set_random_constraint(.5)
        net.set_learnrate(.1)
        net.set_all_inputs(all_inputs)
        net.set_all_targets(all_targets)
        
예제 #17
0
    # NOTE(review): this fragment sits inside a function defined outside this
    # view; pop_len, population_gen, random and math come from that scope.
    factor = 1.0 / float(pop_len)# compute sin across the whole 0 - 359 range
    population = [
        (i, math.sin(float(i) * factor)) for i in range(pop_len)
    ]

    all_inputs = []
    all_targets = []

    # Build the inputs: a random jitter feature plus the scaled position.
    for position, target in population_gen(population):
        pos = float(position)
        all_inputs.append([random.random(), pos * factor])
        all_targets.append([target])

    # 2 inputs -> 10 hidden (tanh) -> 1 output.
    net = NeuralNet()
    net.init_layers(2, [10], 1)
    net.randomize_network()
    net.learnrate = .20

    # NOTE(review): randomize_network() is called a second time here, which
    # discards the weights set above -- presumably unintentional; confirm.
    net.randomize_network()
    net.set_all_inputs(all_inputs)
    net.set_all_targets(all_targets)
    length = len(all_inputs)

    # Learn on the first 80% of the samples, test on the remainder.
    learn_end_point = int(length * .8)
    net.set_learn_range(0, learn_end_point)
    net.set_test_range(learn_end_point + 1, length - 1)
    net.layers[1].set_activation_type('tanh')
    net.learn(epochs = 125, show_epoch_results = True, random_testing = False)
    mse = net.test()
예제 #18
0
 # NOTE(review): fragment of a walk-forward loop; i, y, x_train, y_train,
 # x_test, input_nodes, hidden_nodes, output_nodes and all_mse come from the
 # enclosing scope outside this view.
 y_test = y[i:i + 1]
 y_train = np.array(y_train).reshape((len(y_train), 1))
 y_test = np.array(y_test).reshape((len(y_test), 1))
 # scale the data into the 0-to-1 interval
 scaler_x = MinMaxScaler()
 x_train = scaler_x.fit_transform(x_train)
 x_test = scaler_x.transform(x_test)
 scaler_y = MinMaxScaler()
 y_train = scaler_y.fit_transform(y_train)
 y_test = scaler_y.transform(y_test)
 # pad with a zero row (presumably a placeholder sample for the value being
 # forecast -- confirm against the library's expectations)
 x_input = np.concatenate(
     (x_train, x_test, np.zeros((1, np.shape(x_train)[1]))))
 y_input = np.concatenate((y_train, y_test, np.zeros((1, 1))))
 # build the neural-network model with the defined parameters
 fit1 = NeuralNet()
 fit1.init_layers(input_nodes, [hidden_nodes], output_nodes,
                  ElmanSimpleRecurrent())
 fit1.randomize_network()
 fit1.layers[1].set_activation_type('sigmoid')
 fit1.set_learnrate(0.05)
 fit1.set_all_inputs(x_input)
 fit1.set_all_targets(y_input)
 # learn on samples [0, i), test on sample i only
 fit1.set_learn_range(0, i)
 fit1.set_test_range(i, i + 1)
 fit1.learn(epochs=100, show_epoch_results=True, random_testing=False)
 mse = fit1.test()
 all_mse.append(mse)
 print("test set MSE = ", np.round(mse, 6))
 # recover targets and predictions in the original scale
 target = [item[0][0] for item in fit1.test_targets_activations]
 target = scaler_y.inverse_transform(
     np.array(target).reshape((len(target), 1)))
 pred = [item[1][0] for item in fit1.test_targets_activations]
예제 #19
0
def population_gen(population):
    """Yield the items of *population* in a random order."""
    shuffled = list(population)
    random.shuffle(shuffled)
    yield from shuffled

#   Build the inputs: each sample is [random noise, scaled position], each
#   target the matching value from `population`.
#   NOTE(review): population, factor, all_inputs and all_targets are defined
#   earlier in this script, outside this view.
for position, target in population_gen(population):
    pos = float(position)
    all_inputs.append([random.random(), pos * factor])
    all_targets.append([target])

# 2 inputs -> 10 hidden -> 1 output.
net = NeuralNet()
net.init_layers(2, [10], 1)

net.randomize_network()
net.set_halt_on_extremes(True)

#   Set to constrain beginning weights to -.5 to .5
#       Just to show we can
net.set_random_constraint(.5)
net.set_learnrate(.1)

net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)

# 80% learn split; the range-setting calls presumably follow below.
length = len(all_inputs)
learn_end_point = int(length * .8)