def mso_task_regression_analysis(): print 'MSO Task Regression Analysis' shelf_file_name = 'data/mso_shelved.txt' washout_time = 100 training_time = 1000 testing_time = 600 evaluation_time = 500 #only last X steps evaluated input_range = np.arange(0, 10000, 1) #np.array([range(2000)]) data1 = np.sin(0.2*input_range) + np.sin(0.0311*input_range) data2 = np.sin(2.92*input_range) + np.sin(1.074*input_range) data3 = data1 + data2; data1 = data1[:, None] data2 = data2[:, None] data3 = data3[:, None]; train_target1 = data1[washout_time+1:washout_time+training_time] train_target2 = data2[washout_time+1:washout_time+training_time] train_target3 = data3[washout_time+1:washout_time+training_time] evaluation_data1 = data1[washout_time+training_time+(testing_time-evaluation_time):washout_time+training_time+testing_time] evaluation_data2 = data2[washout_time+training_time+(testing_time-evaluation_time):washout_time+training_time+testing_time] evaluation_data3 = data3[washout_time+training_time+(testing_time-evaluation_time):washout_time+training_time+testing_time] train_echo1 = load_object('train_echo1', shelf_file_name) train_echo2 = load_object('train_echo2', shelf_file_name) test_echo1 = load_object('test_echo1', shelf_file_name) test_echo2 = load_object('test_echo2', shelf_file_name) (w_out1, train_prediction1) = lin_regression_train(train_echo1, train_target1, ridge=1e-8) prediction1 = lin_regression_predict(test_echo1, w_out1) evaluaton_prediction1 = prediction1[-evaluation_time:] nrmse1 = error_metrics.nrmse(evaluaton_prediction1,evaluation_data1) print 'NRMSE1 sin(0.2) + sin(0.0311): ', nrmse1 (w_out2, train_prediction2) = lin_regression_train(train_echo2, train_target2, ridge=1e-8) prediction2 = lin_regression_predict(test_echo2, w_out2) evaluaton_prediction2 = prediction2[-evaluation_time:] nrmse2 = error_metrics.nrmse(evaluaton_prediction2,evaluation_data2) print 'NRMSE2: sin(2.92) + sin(1.074) ', nrmse2 train_X = np.append(train_echo1, train_echo2, 1) test_X = 
np.append(test_echo1, test_echo2, 1) (w_out3, train_prediction3) = lin_regression_train(train_X, train_target3, ridge=1e-8) prediction3 = lin_regression_predict(test_X, w_out3) evaluaton_prediction3 = prediction3[-evaluation_time:] nrmse3 = error_metrics.nrmse(evaluaton_prediction3,evaluation_data3) print 'NRMSE3 sin(0.2) + sin(0.0311) + sin(2.92) + sin(1.074) from regression on combined states of 1 and 2: ', nrmse3 plt.figure(1).clear() plt.plot( evaluation_data3, 'g' ) plt.plot( evaluaton_prediction3, 'b' ) plt.title('Test Performance') plt.legend(['Target signal', 'Free-running predicted signal']) plt.show()
def mso_separation_task(): """ multiple_superimposed_oscillators separation into the components""" input_range = np.arange(3000) #np.array([range(2000)]) timescale=10.0 osc1 = np.sin(input_range/timescale) osc2 = np.sin(2.1*input_range/timescale) osc3 = np.sin(3.4*input_range/timescale) train_target = np.column_stack((osc1, osc2, osc3)) train_input = osc1*np.cos(osc2+2.345*osc3) train_input = train_input[:, None] #1d->2d machine = ESN(1, 800, leak_rate=0.5) print 'Starting training...' start = time.time() trainer = LinearRegressionReadout(machine) trainer.train(train_input[:2000], train_target[:2000]) print 'Training Time: ', time.time() - start, 's' prediction = trainer.predict(train_input[2000:]) mse = error_metrics.mse(prediction,train_target[2000:]) nrmse = error_metrics.nrmse(prediction,train_target[2000:]) print 'MSE: ', mse, 'NRMSE:' , nrmse plt.subplot(3,1,1) plt.plot(train_input[2800:3000]) plt.title('Input') plt.subplot(3,1,2) plt.plot(train_target[2800:3000]) plt.title('Targets') plt.subplot(3,1,3) plt.plot(prediction[800:1000]) plt.title('Predictions') plt.show() return nrmse
def run_sequence(self, inputs, targets, washout_time=0):
    """Run self.T independent ESN trials on a sequence dataset, keep the best.

    Trains a linear readout on all sequences except the last
    (inputs[:-1] / targets[:-1]) and evaluates on the final held-out
    sequence, selecting the trial with the lowest NRMSE.

    Parameters:
        inputs, targets: sequences of 2d arrays, one per sequence
            (inputs[0].shape[1] is taken as the input dimension).
        washout_time: initial steps of the evaluation sequence to skip.

    Returns (best_nrmse, best_machine); also stores best_* results and
    mean_nrmse / std_nrmse on self.
    """
    # TODO: adapt for feedback
    input_dim = inputs[0].shape[1]
    T = self.T
    nrmses = np.empty(self.T)
    self.best_nrmse = float('Inf')
    # target of the held-out evaluation sequence, washout skipped
    self.evaluation_target = targets[-1][washout_time:]
    for i in range(self.T):
        # build a fresh reservoir for every trial
        if self.dummy:
            machine = DummyESN()
        elif self.use_bubbles:
            machine = KitchenSinkBubbleESN(ninput=input_dim, **self.machine_params)
        else:
            machine = ESN(input_dim=input_dim, **self.machine_params)
        # IP (intrinsic plasticity) pre-training, optional
        if self.use_ip:
            # NOTE(review): activ_fct is created but never attached to the
            # machine -- looks like dead code or a missing hookup; verify
            activ_fct = IPTanhActivation(self.ip_learning_rate, 0, self.ip_std, self.machine_params["output_dim"], init_learn=False)
            ipTrainer = IPTrainer(machine, self.ip_learning_rate, self.ip_std)
            ipTrainer.train_sequence(inputs, washout_time)
        trainer = LinearRegressionReadout(machine, self.ridge);
        # train on all sequences but the last ...
        train_echo, train_prediction = trainer.train_sequence(inputs[:-1], targets[:-1], washout_time)
        # ... and evaluate on the held-out final sequence
        test_echo, evaluation_prediction = trainer.predict(inputs[-1])
        evaluation_prediction = evaluation_prediction[washout_time:]
        nrmse = error_metrics.nrmse(evaluation_prediction,self.evaluation_target)
        if (nrmse < self.best_nrmse):
            # remember everything belonging to the best trial so far
            self.best_evaluation_prediction = evaluation_prediction
            self.best_nrmse = nrmse
            self.best_machine = machine
            self.best_trainer = trainer
            self.best_train_echo = train_echo
            self.best_test_echo = test_echo
        nrmses[i] = nrmse
        #if Plots:
        #    esn_plotting.plot_output_distribution((normal_echo,train_echo), ('Output Distribution without IP','Output Distribution with IP',) )
        if (self.LOG):
            print i,'NRMSE:', nrmse #, 'New Spectral Radius:', new_spectral_radius
        #if best_nrmse < math.pow(10,-4):
        #    T = i + 1
        #    break
    # aggregate statistics over all T trials
    self.mean_nrmse = mean(nrmses[:T])
    self.std_nrmse = std(nrmses[:T])
    #self.best_nrmse = min(nrmses[:T])
    #print 'Min NRMSE: ', min_nrmse, 'Mean NRMSE: ', mean_nrmse, 'Std: ', std_nrmse
    if (self.LOG):
        print 'Min NRMSE: ', self.best_nrmse
    return self.best_nrmse, self.best_machine
def run(self, data, training_time, testing_time=None, washout_time=0, evaluation_time=None, target_columns=[0]): """ washout_time is part of the training_time (and evaluation_time part of the testing_time) """ #TODO: fb_columns fuer den Fall, dass das fb!=target ist #if fb == True: # fb_columns = target_columns LOG = self.LOG nr_rows = data.shape[0] if LOG: print nr_rows, 'time steps loaded' if len(data.shape)==1: data = data[:, None] nr_dims = data.shape[1] if testing_time == None: testing_time = data.shape[0]-training_time if evaluation_time == None: evaluation_time = testing_time T = self.T fb = self.fb machine_params = self.machine_params #Generell gibt es input_columns, target_columns und fb_columns. Im Momement gilt target_columns=fb_columns #Fuer washout und IP besteht der input aus input_columns + fb_columns all_columns = range(nr_dims) #input_columns = list(set(all_columns) - set(target_columns)) input_columns = exclude_columns(nr_dims, target_columns) if fb: input_dim = nr_dims #-len(target_columns) + len(fb_columns) pre_train_input_columns = all_columns fb_columns = target_columns #washout_input = data[:washout_time,all_columns] #if use_ip: # ip_pre_train_input = data[washout_time:training_time,all_columns] else: input_dim = nr_dims - len(target_columns) pre_train_input_columns = input_columns #washout_input = data[:washout_time,input_columns] #if use_ip: # ip_pre_train_input = data[washout_time:training_time,input_columns] washout_input = data[:washout_time,pre_train_input_columns] train_input = data[washout_time:training_time,input_columns] train_target = data[washout_time:training_time,target_columns] #x, y, z test_input = data[training_time:training_time+testing_time,input_columns] #test_target = data[training_time:training_time+testing_time,target_columns] self.evaluation_target = data[training_time+(testing_time-evaluation_time):training_time+testing_time,target_columns] nrmses = np.empty(T) self.best_nrmse = float('Inf') for i in range(T): if 
self.dummy: machine = DummyESN(**machine_params) elif self.use_bubbles: machine = KitchenSinkBubbleESN(ninput=input_dim, **machine_params) else: machine = ESN(input_dim=input_dim, **machine_params) #IP #normal_echo = machine.run_batch(train_target) if self.use_ip: ipTrainer = IPTrainer(machine, self.ip_learning_rate, self.ip_std) new_spectral_radius = ipTrainer.train(data, washout_time, training_time, pre_train_input_columns) machine.run_batch(washout_input) if fb: trainer = FeedbackReadout(machine, LinearRegressionReadout(machine, self.ridge)) train_echo, train_prediction = trainer.train(train_input=train_input, train_target=train_target, noise_var=self.fb_noise_var) machine.current_feedback = train_target[-1] test_echo, prediction = trainer.generate(testing_time, inputs=test_input) else: trainer = LinearRegressionReadout(machine, self.ridge); train_echo, train_prediction = trainer.train(train_input=train_input, train_target=train_target) test_echo, prediction = trainer.predict(test_input) evaluaton_prediction = prediction[-evaluation_time:] nrmse = error_metrics.nrmse(evaluaton_prediction,self.evaluation_target) if (nrmse < self.best_nrmse): self.best_evaluation_prediction = evaluaton_prediction self.best_nrmse = nrmse self.best_machine = machine self.best_trainer = trainer self.best_train_echo = train_echo self.best_evaluation_echo = test_echo[-evaluation_time:,:] nrmses[i] = nrmse #if Plots: # esn_plotting.plot_output_distribution((normal_echo,train_echo), ('Output Distribution without IP','Output Distribution with IP',) ) if (LOG): print i+1,'NRMSE:', nrmse #, 'New Spectral Radius:', new_spectral_radius #if best_nrmse < math.pow(10,-4): # T = i + 1 # break self.mean_nrmse = mean(nrmses[:T]) self.std_nrmse = std(nrmses[:T]) #self.best_nrmse = min(nrmses[:T]) #print 'Min NRMSE: ', min_nrmse, 'Mean NRMSE: ', mean_nrmse, 'Std: ', std_nrmse if (LOG): print 'Min NRMSE: ', self.best_nrmse return self.best_nrmse, self.best_machine