class JordanRecurrentTest(unittest.TestCase):
    """Exercises the JordanRecurrent configuration class."""

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = JordanRecurrent(existing_weight=.8)

    def test_class_init_(self):
        config = self.rec_config
        self.assertEqual('a', config.source_type)
        self.assertEqual(1.0, config.incoming_weight)
        self.assertEqual(0.8, config.existing_weight)
        self.assertEqual('m', config.connection_type)
        self.assertEqual(1, config.copy_levels)
        self.assertEqual(0, config.copy_nodes_layer)

    def test_get_source_nodes(self):
        # Jordan recurrence feeds back from the output layer's nodes.
        self.assertEqual(self.net.layers[2].nodes,
                         self.rec_config.get_source_nodes(self.net))
class ElmanSimpleRecurrentTest(unittest.TestCase):
    """Exercises the ElmanSimpleRecurrent configuration class."""

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = ElmanSimpleRecurrent()

    def test_class_init_(self):
        config = self.rec_config
        self.assertEqual('a', config.source_type)
        self.assertEqual(1.0, config.incoming_weight)
        self.assertEqual(0.0, config.existing_weight)
        self.assertEqual('m', config.connection_type)
        self.assertEqual(1, config.copy_levels)
        self.assertEqual(0, config.copy_nodes_layer)

    def test_get_source_nodes(self):
        hidden = self.net.layers[1].get_nodes(NODE_HIDDEN)
        sourced = self.rec_config.get_source_nodes(self.net)
        # Should be the same
        self.assertEqual(len(hidden), len(sourced))
        self.assertEqual(self.net.layers[1].get_nodes(NODE_HIDDEN),
                         self.rec_config.get_source_nodes(self.net))
def setUp(self):
    """Build a minimal 1-1-1 network with fixed weights for testing."""
    self.net = NeuralNet()
    for position, kind in ((0, 'input'), (1, 'hidden'), (2, 'output')):
        layer = Layer(position, kind)
        layer.add_nodes(1, kind)
        self.net.layers.append(layer)
    layers = self.net.layers
    # Wire input -> hidden (weight 1.0) and hidden -> output (weight .75).
    layers[1].nodes[0].add_input_connection(
        Connection(layers[0].nodes[0], layers[1].nodes[0], 1.00))
    layers[2].nodes[0].add_input_connection(
        Connection(layers[1].nodes[0], layers[2].nodes[0], .75))
    self.net._epochs = 1
    self.net.copy_levels = 0
    self.net._allinputs = [[.1], [.2], [.3], [.4], [.5]]
    self.net._alltargets = [[.2], [.4], [.6], [.8], [1.0]]
    self.net.input_layer = layers[0]
    self.net.output_layer = layers[-1]
class JordanRecurrentTest(unittest.TestCase):
    """ Tests JordanRecurrent """

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = JordanRecurrent(existing_weight=.8)

    def test_class_init_(self):
        # Constructor defaults, plus the existing_weight override above.
        expected = [
            ('source_type', 'a'),
            ('incoming_weight', 1.0),
            ('existing_weight', 0.8),
            ('connection_type', 'm'),
            ('copy_levels', 1),
            ('copy_nodes_layer', 0),
        ]
        for attr, value in expected:
            self.assertEqual(value, getattr(self.rec_config, attr))

    def test_get_source_nodes(self):
        source = self.rec_config.get_source_nodes(self.net)
        self.assertEqual(self.net.layers[2].nodes, source)
class NARXRecurrentTest(unittest.TestCase):
    """Exercises the NARXRecurrent configuration class."""

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = NARXRecurrent(
            output_order=1, incoming_weight_from_output=.9,
            input_order=1, incoming_weight_from_input=.7)

    def test_class_init_(self):
        config = self.rec_config
        self.assertEqual(0, config.existing_weight)
        self.assertEqual(None, config._node_type)
        # [order, weight] pairs captured by the constructor.
        self.assertEqual([1, .9], config.output_values)
        self.assertEqual([1, .7], config.input_values)

    def test_get_source_nodes(self):
        # Output mode pulls from the last layer ...
        self.rec_config._node_type = NODE_OUTPUT
        self.assertEqual(self.net.layers[-1].get_nodes(NODE_OUTPUT),
                         self.rec_config.get_source_nodes(self.net))
        # ... input mode from the first.
        self.rec_config._node_type = NODE_INPUT
        self.assertEqual(self.net.layers[0].get_nodes(NODE_INPUT),
                         self.rec_config.get_source_nodes(self.net))
class ElmanSimpleRecurrentTest(unittest.TestCase):
    """ Tests ElmanSimpleRecurrent """

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = ElmanSimpleRecurrent()

    def test_class_init_(self):
        # Elman defaults: copy hidden activations at full incoming weight,
        # replacing (not blending with) the existing context values.
        self.assertEqual('a', self.rec_config.source_type)
        self.assertEqual(1.0, self.rec_config.incoming_weight)
        self.assertEqual(0.0, self.rec_config.existing_weight)
        self.assertEqual('m', self.rec_config.connection_type)
        self.assertEqual(1, self.rec_config.copy_levels)
        self.assertEqual(0, self.rec_config.copy_nodes_layer)

    def test_get_source_nodes(self):
        expected_nodes = self.net.layers[1].get_nodes(NODE_HIDDEN)
        actual_nodes = self.rec_config.get_source_nodes(self.net)
        # Should be the same
        self.assertEqual(len(expected_nodes), len(actual_nodes))
        self.assertEqual(self.net.layers[1].get_nodes(NODE_HIDDEN),
                         self.rec_config.get_source_nodes(self.net))
class NARXRecurrentTest(unittest.TestCase):
    """ Tests NARXRecurrent """

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = NARXRecurrent(
            output_order=1,
            incoming_weight_from_output=.9,
            input_order=1,
            incoming_weight_from_input=.7)

    def test_class_init_(self):
        self.assertEqual(0, self.rec_config.existing_weight)
        self.assertEqual(None, self.rec_config._node_type)
        self.assertEqual([1, .9], self.rec_config.output_values)
        self.assertEqual([1, .7], self.rec_config.input_values)

    def test_get_source_nodes(self):
        # The source depends on which node type the config is processing.
        self.rec_config._node_type = NODE_OUTPUT
        output_nodes = self.net.layers[-1].get_nodes(NODE_OUTPUT)
        self.assertEqual(output_nodes,
                         self.rec_config.get_source_nodes(self.net))
        self.rec_config._node_type = NODE_INPUT
        input_nodes = self.net.layers[0].get_nodes(NODE_INPUT)
        self.assertEqual(input_nodes,
                         self.rec_config.get_source_nodes(self.net))
def setUp(self):
    """Create a 2-1-1 network and a first-order NARX configuration."""
    self.net = NeuralNet()
    self.net.init_layers(2, [1], 1)
    self.rec_config = NARXRecurrent(
        output_order=1, incoming_weight_from_output=.9,
        input_order=1, incoming_weight_from_input=.7)
class RecurrentConfigTest(unittest.TestCase):
    """ Tests RecurrentConfig """

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = RecurrentConfig()

    def test_apply_config(self):
        # apply_config must reject anything that is not a NeuralNet.
        self.assertRaises(
            ValueError, self.rec_config.apply_config, 'not neural net')

    def test__apply_config(self):
        # Fixed: Python 2 print statement -> print() call, consistent with
        # the Python 3 variant of this class elsewhere in the suite.
        print('test__apply_config not yet implemented')

    def test_fully_connect(self):
        # One lower node fully connected to two upper nodes: each upper
        # node gains exactly one input connection back to `node`.
        node = Node()
        unode1 = Node()
        unode2 = Node()
        self.rec_config._fully_connect(node, [unode1, unode2])
        conn = unode1.input_connections[0]
        self.assertEqual(node, conn.lower_node)
        self.assertEqual(unode1, conn.upper_node)
        conn = unode2.input_connections[0]
        self.assertEqual(node, conn.lower_node)
        self.assertEqual(unode2, conn.upper_node)

    def test_get_source_nodes(self):
        # Base class default returns the whole network object.
        self.assertEqual(True, isinstance(
            self.rec_config.get_source_nodes(self.net), NeuralNet))

    def test_get_upper_nodes(self):
        self.assertEqual(1, len(self.rec_config.get_upper_nodes(self.net)))
class RecurrentConfigTest(unittest.TestCase):
    """ Tests RecurrentConfig """

    def setUp(self):
        self.net = NeuralNet()
        self.net.init_layers(2, [1], 1)
        self.rec_config = RecurrentConfig()

    def test_apply_config(self):
        # Anything other than a NeuralNet must be rejected.
        self.assertRaises(ValueError,
                          self.rec_config.apply_config,
                          'not neural net')

    def test__apply_config(self):
        print('test__apply_config not yet implemented')

    def test_fully_connect(self):
        source = Node()
        upper_a = Node()
        upper_b = Node()
        self.rec_config._fully_connect(source, [upper_a, upper_b])
        # Each upper node gets exactly one connection back to the source.
        for upper in (upper_a, upper_b):
            conn = upper.input_connections[0]
            self.assertEqual(source, conn.lower_node)
            self.assertEqual(upper, conn.upper_node)

    def test_get_source_nodes(self):
        self.assertEqual(
            True,
            isinstance(self.rec_config.get_source_nodes(self.net),
                       NeuralNet))

    def test_get_upper_nodes(self):
        self.assertEqual(1, len(self.rec_config.get_upper_nodes(self.net)))
def setUp(self):
    """Build the fixture: small network plus NARX recurrence config."""
    self.net = NeuralNet()
    self.net.init_layers(2, [1], 1)
    self.rec_config = NARXRecurrent(
        output_order=1,
        incoming_weight_from_output=.9,
        input_order=1,
        incoming_weight_from_input=.7)
def setUp(self):
    """Assemble a three-layer (1-1-1) network by hand with known weights."""
    net = NeuralNet()
    input_layer = Layer(0, 'input')
    input_layer.add_nodes(1, 'input')
    net.layers.append(input_layer)
    hidden_layer = Layer(1, 'hidden')
    hidden_layer.add_nodes(1, 'hidden')
    net.layers.append(hidden_layer)
    output_layer = Layer(2, 'output')
    output_layer.add_nodes(1, 'output')
    net.layers.append(output_layer)
    # Specify connections
    hidden_layer.nodes[0].add_input_connection(
        Connection(input_layer.nodes[0], hidden_layer.nodes[0], 1.00))
    output_layer.nodes[0].add_input_connection(
        Connection(hidden_layer.nodes[0], output_layer.nodes[0], .75))
    net._epochs = 1
    net.copy_levels = 0
    net._allinputs = [[.1], [.2], [.3], [.4], [.5]]
    net._alltargets = [[.2], [.4], [.6], [.8], [1.0]]
    net.input_layer = net.layers[0]
    net.output_layer = net.layers[-1]
    self.net = net
def buildIrisNetwork(all_inputs, all_targets):
    """Construct a randomized 4-6-3 network configured for the iris data.

    The first half of the samples is used as the learn range, the second
    half as the test range.  Returns the configured NeuralNet.
    """
    net = NeuralNet()
    net.init_layers(4, [6], 3)
    net.randomize_network()
    net.set_halt_on_extremes(True)

    # Set to constrain beginning weights to -.5 to .5
    # Just to show we can
    #net.set_random_constraint(.5)
    net.set_learnrate(.1)
    net.set_all_inputs(all_inputs)
    net.set_all_targets(all_targets)

    sample_count = len(all_inputs)
    learn_end_point = int(sample_count * .5)
    net.set_learn_range(0, learn_end_point)
    net.set_test_range(learn_end_point + 1, sample_count - 1)

    # tanh through the stack, thresholded output for classification.
    net.layers[0].set_activation_type('tanh')
    net.layers[1].set_activation_type('tanh')
    net.layers[2].set_activation_type('threshold')
    return net
# NOTE(review): this chunk begins mid-function -- the loop and return below
# belong to a Weierstrass-series helper whose header is outside this view,
# so their original nesting is not recoverable here.
while (True):
    if (b % 2 == 1 or a * b > 1 + 3 / 2 * np.pi):
        break
return Weierstrass(a, b, arr)

# Network topology and NARX recurrence parameters: 1-5-1 net, feeding back
# the last 20 outputs and the last 20 inputs, each at half weight.
input_nodes = 1
hidden_nodes = 5
output_nodes = 1
output_order = 20
incoming_weight_from_output = .5
input_order = 20
incoming_weight_from_input = .5

net = NeuralNet()
net.init_layers(
    input_nodes, [hidden_nodes], output_nodes,
    NARXRecurrent(output_order, incoming_weight_from_output,
                  input_order, incoming_weight_from_input))
net.randomize_network()

# Sample the series on [0, 10] and train one-step-ahead prediction:
# inputs are Y[t], targets are Y[t+1].
X = np.linspace(0, 10.0, num=10001)
Y = simpleWeierstrassTimeSeries(X)
Y = Y.reshape(-1, 1)
net.set_all_inputs(Y[:-1])
net.set_all_targets(Y[1:])
net.set_learn_range(0, 8000)
class TestNeuralNet(unittest.TestCase):
    """ Tests NeuralNet

    Fixed: the deprecated unittest aliases failUnlessRaises / failIfEqual
    (removed in Python 3.12) are replaced with assertRaises /
    assertNotEqual throughout.
    """

    def setUp(self):
        """Build a fixed 1-1-1 network: weights 1.0 and .75, one epoch."""
        self.net = NeuralNet()
        layer = Layer(0, 'input')
        layer.add_nodes(1, 'input')
        self.net.layers.append(layer)
        layer = Layer(1, 'hidden')
        layer.add_nodes(1, 'hidden')
        self.net.layers.append(layer)
        layer = Layer(2, 'output')
        layer.add_nodes(1, 'output')
        self.net.layers.append(layer)
        # Specify connections
        self.net.layers[1].nodes[0].add_input_connection(
            Connection(self.net.layers[0].nodes[0],
                       self.net.layers[1].nodes[0], 1.00))
        self.net.layers[2].nodes[0].add_input_connection(
            Connection(self.net.layers[1].nodes[0],
                       self.net.layers[2].nodes[0], .75))
        self.net._epochs = 1
        self.net.copy_levels = 0
        self.net._allinputs = [[.1], [.2], [.3], [.4], [.5]]
        self.net._alltargets = [[.2], [.4], [.6], [.8], [1.0]]
        self.net.input_layer = self.net.layers[0]
        self.net.output_layer = self.net.layers[-1]

    def test_set_halt_on_extremes(self):
        self.net._halt_on_extremes = 'fail'
        self.net.set_halt_on_extremes(True)
        self.assertEqual(True, self.net._halt_on_extremes)
        self.net._halt_on_extremes = 'fail'
        self.net.set_halt_on_extremes(False)
        self.assertEqual(False, self.net._halt_on_extremes)
        # Non-boolean values must be rejected.
        self.net._halt_on_extremes = 'fail'
        self.assertRaises(ValueError, self.net.set_halt_on_extremes, 'a')
        self.net._halt_on_extremes = 'fail'
        self.assertRaises(ValueError, self.net.set_halt_on_extremes, 3)

    def test_get_halt_on_extremes(self):
        self.net.set_halt_on_extremes(True)
        self.assertEqual(True, self.net.get_halt_on_extremes())
        self.net.set_halt_on_extremes(False)
        self.assertEqual(False, self.net.get_halt_on_extremes())

    def test_set_random_constraint(self):
        self.net._random_constraint = 'fail'
        self.net.set_random_constraint(.1)
        self.assertEqual(.1, self.net._random_constraint)
        # Constraint must be a float strictly between 0 and 1.
        self.assertRaises(ValueError, self.net.set_random_constraint, 3)
        self.assertRaises(ValueError, self.net.set_random_constraint, 1)
        self.assertRaises(ValueError, self.net.set_random_constraint, 0.0)
        self.assertRaises(ValueError, self.net.set_random_constraint, -.2)
        self.assertRaises(ValueError, self.net.set_random_constraint, 'a')

    def test_get_random_constraint(self):
        self.net.set_random_constraint(.2)
        self.assertEqual(.2, self.net.get_random_constraint())
        self.net.set_random_constraint(.8)
        self.assertEqual(.8, self.net.get_random_constraint())

    def test_set_epochs(self):
        self.net._epochs = 'fail'
        self.net.set_epochs(3)
        self.assertEqual(3, self.net._epochs)
        # Epochs must be a positive integer.
        self.assertRaises(ValueError, self.net.set_epochs, .3)
        self.assertRaises(ValueError, self.net.set_epochs, 0)
        self.assertRaises(ValueError, self.net.set_epochs, -3)
        self.assertRaises(ValueError, self.net.set_epochs, -.2)
        self.assertRaises(ValueError, self.net.set_epochs, 'a')

    def test_get_epochs(self):
        self.net.set_epochs(3)
        self.assertEqual(3, self.net.get_epochs())

    def test_set_time_delay(self):
        self.net._time_delay = 'fail'
        self.net.set_time_delay(3)
        self.assertEqual(3, self.net._time_delay)
        # Time delay must be a non-negative integer.
        self.assertRaises(ValueError, self.net.set_time_delay, .3)
        self.assertRaises(ValueError, self.net.set_time_delay, -3)
        self.assertRaises(ValueError, self.net.set_time_delay, -.2)
        self.assertRaises(ValueError, self.net.set_time_delay, 'a')

    def test_get_time_delay(self):
        self.net.set_time_delay(3)
        self.assertEqual(3, self.net.get_time_delay())

    # --- placeholder tests, not yet implemented ---

    def test_set_all_inputs(self):
        pass

    def test_set_all_targets(self):
        pass

    def test_set_learnrate(self):
        pass

    def test_get_learnrate(self):
        pass

    def test__set_data_range(self):
        pass

    def test_set_learn_range(self):
        pass

    def test_get_learn_range(self):
        pass

    def test__check_time_delay(self):
        pass

    def test_get_learn_data(self):
        pass

    def test_get_validation_data(self):
        pass

    def test_get_test_data(self):
        pass

    def test__get_data(self):
        pass

    def test__get_randomized_position(self):
        pass

    def test__check_positions(self):
        pass

    def test_set_validation_range(self):
        pass

    def test_get_validation_range(self):
        pass

    def test_set_test_range(self):
        pass

    def test_get_test_range(self):
        pass

    def test_init_layers(self):
        pass

    def test__init_connections(self):
        pass

    def test__connect_layer(self):
        pass

    def test__build_output_conn(self):
        pass

    def test_randomize_network(self):
        pass

    def test_learn(self):
        pass

    def test_test(self):
        pass

    def test_calc_mse(self):
        self.assertAlmostEqual(10.0 / 2.0, self.net.calc_mse(100.0, 10))

    def test_process_sample(self):
        pass

    def test__feed_forward(self):
        # simplify activations
        self.net.layers[0].set_activation_type('sigmoid')
        self.net.layers[1].set_activation_type('sigmoid')
        self.net.layers[2].set_activation_type('sigmoid')
        # These values should be replaced
        self.net.layers[1].nodes[0].set_value(1000.0)
        self.net.layers[2].nodes[0].set_value(1000.0)
        self.assertEqual(1000.0, self.net.layers[1].nodes[0].get_value())
        self.assertEqual(1000.0, self.net.layers[2].nodes[0].get_value())
        self.net.layers[0].load_inputs([.2])
        self.net._feed_forward()
        self.assertEqual(.2, self.net.layers[0].nodes[0].get_value())
        self.assertEqual(
            sigmoid(.2) * 1.0,
            self.net.layers[1].nodes[0].get_value())
        self.assertEqual(
            sigmoid(sigmoid(.2) * 1.0) * .75,
            self.net.layers[2].nodes[0].get_value())

    def test__back_propagate(self):
        pass

    def test__update_error(self):
        pass

    def test__adjust_weights(self):
        """
        This function goes through layers starting with the top hidden
        layer and working its way down to the input layer.  At each
        layer, the weights are adjusted based upon the errors.
        """
        halt_on_extremes = True
        for layer_no in range(len(self.net.layers) - 2, 0, -1):
            layer = self.net.layers[layer_no + 1]
            layer.adjust_weights(self.net._learnrate, halt_on_extremes)

    def test__zero_errors(self):
        for layer in self.net.layers[1:]:
            for node in layer.nodes:
                node.error = 1000
        self.net._zero_errors()
        # Every non-input node's error must have been reset.
        for layer in self.net.layers[1:]:
            for node in layer.nodes:
                self.assertNotEqual(1000, node.error)

    def test_calc_output_error(self):
        pass

    def test_calc_sample_error(self):
        pass

    def test__copy_levels(self):
        pass

    def test__parse_inputfile_layer(self):
        pass

    def test__parse_inputfile_node(self):
        pass

    def test__parse_inputfile_conn(self):
        pass

    def test__parse_inputfile_copy(self):
        pass

    def test__parse_node_id(self):
        pass

    def test_load(self):
        pass

    def test_output_values(self):
        pass

    def test__node_id(self):
        pass

    def test_save(self):
        pass
""" pop_sort = [item for item in population] random.shuffle(pop_sort) for item in pop_sort: yield item # Build the inputs pat, testpat = randomdata() for p in pat: all_inputs.append(p[0]) all_targets.append(p[1]) net = NeuralNet() net.init_layers(4, [6], 3) print array(all_inputs).shape print all_inputs print print print array(all_targets).shape print all_targets raw_input('') net.randomize_network() net.set_halt_on_extremes(True) # Set to constrain beginning weights to -.5 to .5
xs, ys = np.meshgrid(num_nodes, num_nodes)
# Flatten the grid into parallel 1-D arrays of (l1, l2) candidates.
# Fixed: the original concatenated rows xs[0]..xs[20] by hand, which
# silently hard-coded a 21-row grid; ravel() produces the identical
# row-major flattening for that case and works for any len(num_nodes).
xs = xs.ravel()
ys = ys.ravel()

# create network and run training for every (l1, l2) hidden-layer pair
for l1, l2 in zip(xs, ys):
    # Feed back the last 6 outputs at full weight; no input-order terms.
    output_order = 6
    incoming_weight_from_output = 1.
    input_order = 0
    incoming_weight_from_input = 0.

    net = NeuralNet()
    net.init_layers(4, [l1, l2], 1,
                    NARXRecurrent(
                        output_order, incoming_weight_from_output,
                        input_order, incoming_weight_from_input))
    net.randomize_network()
    net.set_halt_on_extremes(True)

    # Set to constrain beginning weights to -.5 to .5
    # Just to show we can
    net.set_random_constraint(.5)
    net.set_learnrate(.1)
sys.path.append('/home/david/Dropbox/programming/python/ann/myangn/sem6')
sys.path.append('/home/david/Dropbox/programming/python/ann/mypybrain')

from pylab import array, ylim, where, average
from pylab import plot, legend, subplot, grid, xlabel, ylabel, show, title
from pyneurgen.neuralnet import NeuralNet
from pyneurgen.nodes import BiasNode, Connection
from pybrain.utilities import percentError
from iris import neurgenData
# NOTE(review): this import shadows the percentError imported from
# pybrain.utilities two lines above -- confirm which one is intended.
from src.utilities import percentError

# Build the inputs
all_inputs, all_targets = neurgenData()

# 4-6-3 feed-forward network for the iris classification data.
net = NeuralNet()
net.init_layers(4, [6], 3)
net.randomize_network()
net.set_halt_on_extremes(True)

# Set to constrain beginning weights to -.5 to .5
# Just to show we can
#net.set_random_constraint(.5)
net.set_learnrate(.1)
net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)

# First half of the samples is the learn range (test range set below).
length = len(all_inputs)
learn_end_point = int(length * .5)
pop_len = 360
factor = 1.0 / float(pop_len)   # scale factor; sin computed over all of 0-359
population = [(i, math.sin(float(i) * factor)) for i in range(pop_len)]

all_inputs = []
all_targets = []

# Build the inputs: a noise feature plus the scaled position.
for position, target in population_gen(population):
    scaled_pos = float(position) * factor
    all_inputs.append([random.random(), scaled_pos])
    all_targets.append([target])

# 2-10-1 network trained on the sine samples.
net = NeuralNet()
net.init_layers(2, [10], 1)
net.randomize_network()
net.learnrate = .20
net.randomize_network()
net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)

length = len(all_inputs)
learn_end_point = int(length * .8)
net.set_learn_range(0, learn_end_point)
net.set_test_range(learn_end_point + 1, length - 1)
net.layers[1].set_activation_type('tanh')

net.learn(epochs=125, show_epoch_results=True, random_testing=False)
mse = net.test()
# NOTE(review): chunk begins inside a walk-forward loop (index i) and ends
# mid-statement -- the final inverse_transform call is truncated here.
y_train = y[0:i]
y_test = y[i:i + 1]
y_train = np.array(y_train).reshape((len(y_train), 1))
y_test = np.array(y_test).reshape((len(y_test), 1))

# transform the data to lie in the interval 0 to 1
scaler_x = MinMaxScaler()
x_train = scaler_x.fit_transform(x_train)
x_test = scaler_x.transform(x_test)
scaler_y = MinMaxScaler()
y_train = scaler_y.fit_transform(y_train)
y_test = scaler_y.transform(y_test)

# Stack train, test, and one zero row (placeholder for the step-ahead
# sample) into the full input/target arrays the network consumes.
x_input = np.concatenate(
    (x_train, x_test, np.zeros((1, np.shape(x_train)[1]))))
y_input = np.concatenate((y_train, y_test, np.zeros((1, 1))))

# build the neural-network model with the parameters defined above
fit1 = NeuralNet()
fit1.init_layers(input_nodes, [hidden_nodes], output_nodes,
                 ElmanSimpleRecurrent())
fit1.randomize_network()
fit1.layers[1].set_activation_type('sigmoid')
fit1.set_learnrate(0.05)
fit1.set_all_inputs(x_input)
fit1.set_all_targets(y_input)
fit1.set_learn_range(0, i)
fit1.set_test_range(i, i + 1)
fit1.learn(epochs=100, show_epoch_results=True, random_testing=False)

mse = fit1.test()
all_mse.append(mse)
print("test set MSE = ", np.round(mse, 6))

# Undo the 0-1 scaling on the predicted targets.
target = [item[0][0] for item in fit1.test_targets_activations]
target = scaler_y.inverse_transform(
# Attach the training data to every genotype in the population.
for g in ges.population:
    g.all_inputs = all_inputs
    g.all_targets = all_targets

print(ges.run())
print("Final Fitness list sorted best to worst:")
print(ges.fitness_list.sorted())
print()
print()

# Pull out the winning genotype and its saved network description.
g = ges.population[ges.fitness_list.best_member()]
program = g.local_bnf['program']
saved_model = g.local_bnf['<saved_name>'][0]

# We will create a brand new model
net = NeuralNet()
net.load(saved_model)
net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)
test_start_point = int(pop_len * .8) + 1
net.set_test_range(test_start_point, pop_len - 1)
mse = net.test()

print("The selected model has the following characteristics")
print("Activation Type:", net.layers[1].nodes[1].get_activation_type())
print("Hidden Nodes:", len(net.layers[1].nodes), ' + 1 bias node')
print("Learn Rate:", net.get_learnrate())
print("Epochs:", net.get_epochs())
selected_data_time = [[row[0]] for row in selected_data]
# NOTE(review): these three slices are built but never read below --
# set_all_inputs/targets use selected_data instead; confirm intent.
training_data = raw_data[start_training_idx:end_training_idx:pick_every]
validate_data = raw_data[start_validate_idx:end_validate_idx:pick_every]
test_data = raw_data[start_test_idx:end_test_idx:pick_every]

# define NN
input_nodes = 1
hidden_nodes = 10
output_nodes = 1
output_order = 10
incoming_weight_from_output = .6
input_order = 10
incoming_weight_from_input = .4

net = NeuralNet()
net.init_layers(
    input_nodes, [hidden_nodes], output_nodes,
    NARXRecurrent(output_order, incoming_weight_from_output,
                  input_order, incoming_weight_from_input))
net.randomize_network()
net.set_halt_on_extremes(True)
net.set_random_constraint(.5)
net.set_learnrate(.1)

# net.set_all_inputs(training_data[:, 1])
# this results in [a, b, ..., z] not [[a], [b], ..., [z]]
# net.set_all_targets(training_data[:, 2])
# wanting [[a], [b], ..., [z]]
net.set_all_inputs([[row[1]] for row in selected_data])
net.set_all_targets([[row[2]] for row in selected_data])
def serNeural(sDay, nAhead, x0, hWeek):
    """Fit a NARX network to a daily series and plot the test results.

    sDay   -- frame-like object with 'y' and 'hist' columns and a date
              index (assumed pandas -- TODO confirm against caller)
    nAhead -- number of periods ahead (used only for nLin here)
    x0     -- parameter mapping; 'obs_time' is read as an int
    hWeek  -- passed through to getHistory()
    """
    nLin = sDay.shape[0] + nAhead
    # Fit window: whole series unless obs_time asks for more than 14 points.
    nFit = sDay.shape[0] if int(x0['obs_time']) <= 14 else int(x0['obs_time'])
    predS = getHistory(sDay, nAhead, x0, hWeek)
    weekS = [x.isocalendar()[1] for x in sDay.index]
    # Each sample: [position, value, day-of-week, week-of-year].
    population = [[float(i), sDay['y'][i], float(i % 7), weekS[i]]
                  for i in range(sDay.shape[0])]
    all_inputs = []
    all_targets = []
    # Scale factors derived from the series statistics.
    factorY = sDay['y'].mean()
    factorT = 1.0 / float(len(population)) * factorY  # NOTE(review): unused below
    factorD = 1. / 7. * factorY
    factorW = 1. / 52. * factorY
    factorS = 4. * sDay['y'].std()
    factorH = factorY / sDay['hist'].mean()           # NOTE(review): unused below

    def population_gen(population):
        # Yields samples in original order (shuffling left disabled).
        pop_sort = [item for item in population]
        # random.shuffle(pop_sort)
        for item in pop_sort:
            yield item

    # Inputs: scaled day-of-week, jittered mean level, scaled week-of-year.
    for t, y, y1, y2 in population_gen(population):
        #all_inputs.append([t*factorT,(.5-random.random())*factorS+factorY,y1*factorD,y2*factorW])
        all_inputs.append([y1 * factorD,
                           (.5 - random.random()) * factorS + factorY,
                           y2 * factorW])
        all_targets.append([y])

    # Optional debug plot of the engineered inputs (disabled).
    if False:
        plt.plot([x[0] for x in all_inputs], '-', label='targets0')
        plt.plot([x[1] for x in all_inputs], '-', label='targets1')
        plt.plot([x[2] for x in all_inputs], '-', label='targets2')
        # plt.plot([x[3] for x in all_inputs],'-',label='targets3')
        plt.plot([x[0] for x in all_targets], '-', label='actuals')
        plt.legend(loc='lower left', numpoints=1)
        plt.show()

    # 3-input NARX net: 10 hidden nodes, output order 3 (weight .6),
    # input order 2 (weight .4).
    net = NeuralNet()
    net.init_layers(3, [10], 1, NARXRecurrent(3, .6, 2, .4))
    net.randomize_network()
    net.set_random_constraint(.5)
    net.set_learnrate(.1)
    net.set_all_inputs(all_inputs)
    net.set_all_targets(all_targets)
    #predS['pred'] = [item[0][0] for item in net.test_targets_activations]
    # Learn on the first 80%, test on the remainder.
    length = len(all_inputs)
    learn_end_point = int(length * .8)
    # random.sample(all_inputs,10)
    net.set_learn_range(0, learn_end_point)
    net.set_test_range(learn_end_point + 1, length - 1)
    net.layers[1].set_activation_type('tanh')
    net.learn(epochs=125, show_epoch_results=True, random_testing=False)
    mse = net.test()
    #net.save(os.environ['LAV_DIR'] + "/out/train/net.txt")
    test_positions = [item[0][0] for item in net.get_test_data()]
    all_targets1 = [item[0][0] for item in net.test_targets_activations]
    all_actuals = [item[1][0] for item in net.test_targets_activations]
    # This is quick and dirty, but it will show the results
    plt.subplot(3, 1, 1)
    plt.plot([i for i in sDay['y']], '-')
    plt.title("Population")
    plt.grid(True)
    plt.subplot(3, 1, 2)
    plt.plot(test_positions, all_targets1, 'b-', label='targets')
    plt.plot(test_positions, all_actuals, 'r-', label='actuals')
    plt.grid(True)
    plt.legend(loc='lower left', numpoints=1)
    plt.title("Test Target Points vs Actual Points")
    plt.subplot(3, 1, 3)
    plt.plot(range(1, len(net.accum_mse) + 1, 1), net.accum_mse)
    plt.xlabel('epochs')
    plt.ylabel('mean squared error')
    plt.grid(True)
    plt.title("Mean Squared Error by Epoch")
    plt.show()
def setUp(self):
    """Create a small 2-1-1 network plus the base config under test."""
    self.net = NeuralNet()
    self.net.init_layers(2, [1], 1)
    self.rec_config = RecurrentConfig()
# NOTE(review): Python 2 source (print statements).  This chunk begins
# inside a spreadsheet-reading loop; the loop headers lie outside this
# view, so the statements below are shown flat -- the original nesting
# (likely an inner column loop over `inc` inside a row loop) is not
# recoverable here.
temp.append(worksheet.cell(row, column + inc).value)
inc += 1
all_targets.append(temp)
row += 1
print all_targets

"""for position, target in population_gen(population):
all_inputs.append([float(position) / float(pop_len), random.random()])
all_targets.append([target])
"""

# Attach the training data to every genotype, then run the GE search.
for g in ges.population:
    g.all_inputs = all_inputs
    g.all_targets = all_targets

print ges.run()
print "Final Fitness list sorted best to worst:"
print ges.fitness_list.sorted()
print
print

# Pull out the winning genotype and its saved network description.
g = ges.population[ges.fitness_list.best_member()]
program = g.local_bnf['program']
saved_model = g.local_bnf['<saved_name>'][0]

# We will create a brand new model
net = NeuralNet()
net.load(saved_model)
def setUp(self):
    """Build the fixture: a 2-1-1 network and an Elman configuration."""
    self.net = NeuralNet()
    self.net.init_layers(2, [1], 1)
    self.rec_config = ElmanSimpleRecurrent()
# NOTE(review): chunk starts inside generate_data()'s sample loop; the
# indentation of the first statements reflects that missing context, and
# the bare `return` belongs to the enclosing function.
    all_inputs.append([position * factor])
    all_targets.append([target])
    # print(all_inputs[-1], all_targets[-1])
return population, all_inputs, all_targets

# generate data
population, all_inputs, all_targets = generate_data()

# NARXRecurrent
input_nodes, hidden_nodes, output_nodes = 1, 10, 1
output_order, incoming_weight_from_output = 3, .6
input_order, incoming_weight_from_input = 2, .4

# init neural network
net = NeuralNet()
net.init_layers(
    input_nodes, [hidden_nodes], output_nodes,
    NARXRecurrent(output_order, incoming_weight_from_output,
                  input_order, incoming_weight_from_input))
net.randomize_network()
net.set_halt_on_extremes(True)

# set constrains and rates
net.set_random_constraint(.5)
net.set_learnrate(.1)

# set inputs and outputs
net.set_all_inputs(all_inputs)
net.set_all_targets(all_targets)
def setUp(self):
    """Build the fixture: a 2-1-1 network and a Jordan configuration."""
    self.net = NeuralNet()
    self.net.init_layers(2, [1], 1)
    self.rec_config = JordanRecurrent(existing_weight=.8)
# Reshape the target into a column vector and scale both series to [0, 1].
y = y.reshape(len(y), 1)
scaler = preprocessing.MinMaxScaler(feature_range=(0, 1))
x = scaler.fit_transform(x)
# NOTE(review): the same scaler instance is refit on y, discarding the
# parameters learned from x -- confirm separate scalers aren't needed.
y = scaler.fit_transform(y)

random.seed(101)

# NARX topology: 1-10-1 network with first-order feedback from the
# output (weight .3) and the input (weight .6).
input_nodes = 1
hidden_nodes = 10
output_nodes = 1
output_order = 1
input_order = 1
incoming_weight_from_output = 0.3
incoming_weight_from_input = 0.6

fit1 = NeuralNet()
fit1.init_layers(
    input_nodes, [hidden_nodes], output_nodes,
    NARXRecurrent(output_order, incoming_weight_from_output,
                  input_order, incoming_weight_from_input))
fit1.randomize_network()
fit1.layers[1].set_activation_type('sigmoid')
fit1.set_learnrate(0.35)
fit1.set_all_inputs(x)
fit1.set_all_targets(y)

# Learn on the first 85% of the samples, test on the remainder.
length = len(x)
learn_end_point = int(length * 0.85)
fit1.set_learn_range(0, learn_end_point)
fit1.set_test_range(learn_end_point + 1, length - 1)