parser.add_argument("savefile", help="path to saved .mat file", type=str) args = parser.parse_args() ############################################################################ # Read config file ############################################################################ config = json.load(open(args.esnconfig + '.json', 'r')) ############################################################################ # Load data ############################################################################ # If the data is stored in a directory, load the data from there. Otherwise, # load from the single file and split it. if os.path.isdir(args.data): Xtr, Ytr, _, _, Xte, Yte = esnet.load_from_dir(args.data) else: X, Y = esnet.load_from_text(args.data) # Construct training/test sets Xtr, Ytr, _, _, Xte, Yte = esnet.generate_datasets(X, Y) def main(): # Run in parallel and store result in a numpy array Yhat,error,train_states,train_embedding,test_states,test_embedding = esnet.run_from_config_return_states(Xtr, Ytr, Xte, Yte, config) savemat(args.savefile, {'train_states':train_states, 'train_embedding':train_embedding, 'test_states':test_states, 'test_embedding':test_embedding}) if __name__ == "__main__": main()
elif dataType in ('SantaFe', 'Sunspots', 'Hongik', 'GEFC', 'Mackey', 'SP500',
                  'Rainfall', 'Temperature', 'MinTempMel', 'SunSpotsZu',
                  'TempAlbuquerque', 'TempDenver', 'TempLasVegas',
                  'TempLosAngeles', 'TempPhoenix', 'TempPortland',
                  'TempSanDiego', 'TempSanFrancisco', 'TempSeattle',
                  'TempVancouver',
                  'eleGB2015_7_12', 'eleDE2015_7_12', 'eleFR2015_7_12',
                  'Electric'):
    # Xtr, Ytr, _, _, Xte, Yte, Yscaler = esnet.generate_datasets_santafe(args.data)
    X, Y = esnet.load_from_text(args.data)

    # Construct training/test sets
    Xtr, Ytr, _, _, Xte, Yte, Yscaler = esnet.generate_datasets(X, Y)

    # Reconstruct (1-D)
    Xtr, Xte = esnet.reconstruct_input_1d([Xtr, Xte], reconstructconfig)
    Ytr, Yte = esnet.reconstruct_output_1d([Ytr, Yte], reconstructconfig)

elif dataType in ('GEFC_temp', 'HenonMap'):
    X, Y = esnet.load_from_text(args.data)

    # Construct training/test sets
    Xtr, Ytr, _, _, Xte, Yte, Yscaler = esnet.generate_datasets(X, Y)

    # Reconstruct (2-D)
    Xtr, Xte = esnet.reconstruct_input_2d([Xtr, Xte], reconstructconfig)
    Ytr, Yte = esnet.reconstruct_output_2d([Ytr, Yte], reconstructconfig)

else:
optconfig = paramhelper._optimization

############################################################################
# Load data
############################################################################
print("Loading data (%s)" % args.data)

# If the data is stored in a directory, load the data from there. Otherwise,
# load from the single file and split it.
if os.path.isdir(args.data):
    Xtr, Ytr, Xval, Yval, _, _ = esnet.load_from_dir(args.data)
else:
    X, Y = esnet.load_from_text(args.data)

    # Construct training/validation sets
    Xtr, Ytr, Xval, Yval, _, _ = esnet.generate_datasets(X, Y)

############################################################################
# Initialization of the genetic algorithm
############################################################################

# Fitness and individual. Different formats, depending on dimensionality reduction.
if paramhelper._fixed_values['embedding'] == 'identity':
    creator.create("FitnessMin", base.Fitness, weights=(-1.0,))       # -1.0 => minimize function
else:
    creator.create("FitnessMin", base.Fitness, weights=(-1.0, -0.1))  # -1.0 => minimize function

# The individuals are dicts of numbers (parameters).
# The length and the type of the numbers vary across ESN configurations.
creator.create("Individual", dict, fitness=creator.FitnessMin)
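# Illustration (assumption, not taken from this script): with a dict-based
# Individual, the DEAP setup usually continues by filling a base.Toolbox with
# custom operators, since the built-in list-based crossover/mutation operators
# do not apply to dicts. `random_params`, `mutate_params` and `evaluate_esn`
# are hypothetical stand-ins for this project's own helpers.
#
#   from deap import tools
#   toolbox = base.Toolbox()
#   toolbox.register("individual", tools.initIterate, creator.Individual, random_params)
#   toolbox.register("population", tools.initRepeat, list, toolbox.individual)
#   toolbox.register("mutate", mutate_params)
#   toolbox.register("evaluate", evaluate_esn)
#   toolbox.register("select", tools.selTournament, tournsize=3)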
############################################################################
# Load data
############################################################################
logger.info("Loading data (%s)" % args.data)

# If the data is stored in a directory, load the data from there. Otherwise,
# load from the single file and split it.
if os.path.isdir(args.data):
    Xtr, Ytr, Xval, Yval, _, _, Yscaler = esnet.load_from_dir(args.data)
else:
    count = args.count
    X, Y = esnet.load_from_text(args.data)

    # Construct training/validation sets from the first `count` samples
    Xtr, Ytr, Xval, Yval, _, _, Yscaler = esnet.generate_datasets(X[:count], Y[:count])

############################################################################
# Initialization of the genetic algorithm
############################################################################

# Fitness and individual. Different formats, depending on dimensionality reduction.
if paramhelper._fixed_values['embedding'] == 'identity':
    creator.create("FitnessMin", base.Fitness, weights=(-1.0,))       # -1.0 => minimize function
else:
    creator.create("FitnessMin", base.Fitness, weights=(-1.0, -0.1))  # -1.0 => minimize function

# The individuals are dicts of numbers (parameters).
# The length and the type of the numbers vary across ESN configurations.
creator.create("Individual", dict, fitness=creator.FitnessMin)
elif dataType in ('SantaFe', 'Sunspots', 'Hongik', 'GEFC', 'Mackey', 'SP500',
                  'Rainfall', 'Temperature', 'MinTempMel', 'SunSpotsZu',
                  'TempAlbuquerque', 'TempDenver', 'TempLasVegas',
                  'TempLosAngeles', 'TempPhoenix', 'TempPortland',
                  'TempSanDiego', 'TempSanFrancisco', 'TempSeattle',
                  'TempVancouver',
                  'eleGB2015_7_12', 'eleDE2015_7_12', 'eleFR2015_7_12',
                  'Electric'):
    # Xtr, Ytr, Xval, Yval, _, _, Yscaler = esnet.generate_datasets_santafe(args.data)
    X, Y = esnet.load_from_text(args.data)

    # Construct training/validation sets
    Xtr, Ytr, Xval, Yval, _, _, Yscaler = esnet.generate_datasets(X, Y)

    # Reconstruct (1-D)
    Xtr, Xval = esnet.reconstruct_input_1d([Xtr, Xval], reconstructconfig)
    Ytr, Yval = esnet.reconstruct_output_1d([Ytr, Yval], reconstructconfig)

elif dataType in ('GEFC_temp', 'HenonMap'):
    X, Y = esnet.load_from_text(args.data)

    # Construct training/validation sets
    Xtr, Ytr, Xval, Yval, _, _, Yscaler = esnet.generate_datasets(X, Y)

    # Reconstruct (2-D)
    Xtr, Xval = esnet.reconstruct_input_2d([Xtr, Xval], reconstructconfig)
    Ytr, Yval = esnet.reconstruct_output_2d([Ytr, Yval], reconstructconfig)

else:
############################################################################
# Load data
############################################################################
logger.info("Loading data (%s)" % args.data)

# If the data is stored in a directory, load the data from there. Otherwise,
# load from the single file and split it.
if os.path.isdir(args.data):
    Xtr, Ytr, Xval, Yval, _, _ = esnet.load_from_dir(args.data)
else:
    X, Y = esnet.load_from_text(args.data)

    # Construct training/validation sets, scaling the data with MinMaxScaler
    Xtr, Ytr, Xval, Yval, _, _ = esnet.generate_datasets(X, Y, scaler=MinMaxScaler)

############################################################################
# Initialization of the genetic algorithm
############################################################################

# Fitness and individual. Different formats, depending on dimensionality reduction.
if paramhelper._fixed_values['embedding'] == 'identity':
    creator.create("FitnessMin", base.Fitness, weights=(-1.0,))       # -1.0 => minimize function
else:
    creator.create("FitnessMin", base.Fitness, weights=(-1.0, -0.1))  # -1.0 => minimize function

# The individuals are dicts of numbers (parameters).
# The length and the type of the numbers vary across ESN configurations.
def main():
    averages = []
    predictions_error = []
    predictions = []
    reals = []
    startPoint = args.count  # For the first few predictions, use the last error as prediction
    dataPath = '/home/minh/PycharmProjects/Ensemble/PythonESN/data/edgar_historical'
    count = 0

    # Disabled: compute the average of the first five columns and record the
    # actual value for each row of the raw file.
    """with open(dataPath, 'r') as f:
        for line in f:
            if count > 0:
                data = line.split(',')
                total = 0
                for i in range(5):
                    total += float(data[i])
                averages.append(total / 5)
                reals.append(float(data[6]))
            count += 1"""

    # Disabled: collect the first few moving-average residuals as initial predictions.
    """count = 0
    with open(dataPath, 'r') as f:
        for line in f:
            if count > 0:
                if count > (startPoint + 2):
                    break
                else:
                    data = line.split(',')
                    total = 0
                    for i in range(5):
                        total += float(data[i])
                    predictions.append(float(data[6]) - total / 5)
            count += 1
    print('predictions:', predictions)"""

    # Train the ESN on the first `startPoint` samples and collect its predictions
    X, Y = esnet.load_from_text(args.data)
    Xtr, Ytr, _, _, Xte, Yte, Yscaler = esnet.generate_datasets(X[:startPoint], Y[:startPoint])
    Yhat, error = esnet.run_from_config(Xtr, Ytr, Xte, Yte, config, Yscaler)
    Yhat = np.ceil(Yscaler.inverse_transform(Yhat))
    predictions_error.append(Yhat[len(Yhat) - 1][0])

    # Disabled: rolling one-step-ahead predictions over the remainder of the series.
    """for i in range(startPoint, len(X)):
        Xtr, Ytr, _, _, Xte, Yte, Yscaler = esnet.generate_datasets(X[:i], Y[:i])
        if i < 800:
            config['n_drop'] = int(i / 8)
        else:
            config['n_drop'] = 100
        Yhat, error = esnet.run_from_config(Xtr, Ytr, Xte, Yte, config)
        Yhat = np.ceil(Yscaler.inverse_transform(Yhat))
        # print('predictions:', Yhat)
        # print('error:', error)
        predictions_error.append(Yhat[len(Yhat) - 1][0])
        count += 1
        if count % 100 == 0:
            print('predictions made:', count)"""

    # Build the output path: <cwd>/predictions/predictions_<config name>_<run index>
    configs = args.esnconfig.split('/')
    writePath = os.path.join(os.getcwd(), 'predictions',
                             'predictions_' + configs[-1] + '_' + str(args.times))

    # Disabled: append only the last prediction to the file.
    """with open(writePath, 'a') as f:
        f.write(str(np.ceil(Yhat[len(Yhat) - 1][0])) + '\n')"""

    # Write all predictions, one per line.
    with open(writePath, 'w') as f:
        for i in range(len(Yhat)):
            f.write(str(np.ceil(Yhat[i][0])) + '\n')
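# Usage sketch (assumption, not part of the original script): the predictions
# file written above holds one value per line, so it can be read back with
# numpy for later comparison against the actual series.
#
#   import numpy as np
#   preds = np.loadtxt(writePath)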