from sklearn import preprocessing as pp
from reservoir import Utility as util
from performance import ErrorMetrics as rmse
import numpy as np  # np.loadtxt is used below but numpy was never imported

# Read the Mackey-Glass (tau=17) time series from disk.
# Reshape to a column vector immediately: sklearn scalers require 2-D
# input of shape (n_samples, n_features); fitting on the 1-D array that
# np.loadtxt returns raises ValueError in modern scikit-learn.
data = np.loadtxt('MackeyGlass_t17.txt').reshape(-1, 1)

# Normalize the raw data into the range [-1, 1].
# (Fitted on the full series, matching the original behavior.)
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Keep only the first 5000 points.
data = data[:5000].reshape((5000, 1))

# Split the series: 40% training, 60% testing.
trainingData, testingData = util.splitData2(data, 0.4)
nTesting = testingData.shape[0]

# Form input/target feature-vector pairs from the training series.
inputTrainingData, outputTrainingData = util.formFeatureVectors(trainingData)

# Network hyper-parameters.
size = 256              # reservoir size
initialTransient = 50   # warm-up steps discarded before training

# Input-to-reservoir fully connected weight matrix.
# NOTE(review): `topology` is not imported in this chunk — presumably
# imported elsewhere in the file; verify.
inputWeight = topology.ClassicInputTopology(
    inputSize=inputTrainingData.shape[1],
    reservoirSize=size).generateWeightMatrix()

# Reservoir-to-reservoir fully connected
# Forecasting parameters.
depth = 30  # number of lagged values per feature vector

# Read the Mackey-Glass (tau=17) time series from disk.
# NOTE(review): `np`, `pp`, and `util` are not imported in this chunk —
# presumably imported earlier in the file; verify.
# Reshape to a column vector immediately: sklearn scalers require 2-D
# input of shape (n_samples, n_features); fitting on the 1-D array that
# np.loadtxt returns raises ValueError in modern scikit-learn.
data = np.loadtxt('MackeyGlass_t17.txt').reshape(-1, 1)

# Standardize the raw data (zero mean, unit variance).
# minMax = pp.MinMaxScaler((-1,1))
minMax = pp.StandardScaler()
data = minMax.fit_transform(data)

# Keep only the first 5000 points.
data = data[:5000].reshape((5000, 1))

# Split the series: 85% available for training, 15% for testing.
trainingData, testingData = util.splitData2(data, 0.85)
availableData = trainingData
nTesting = testingData.shape[0]

# Carve a validation set (40%) out of the training portion.
validationRatio = 0.4
trainingData, validationData = util.splitData2(trainingData,
                                               1.0 - validationRatio)

# Form (feature, target) vector pairs for each split.
# NOTE(review): `formFeatureTargetVectors` is not defined in this chunk —
# presumably defined/imported elsewhere in the file; verify.
trainingFeatureVectors, trainingTargetVectors = formFeatureTargetVectors(
    trainingData, depth)
validationFeatureVectors, validationTargetVectors = formFeatureTargetVectors(
    validationData, depth)
testingFeatureVectors, testingTargetVectors = formFeatureTargetVectors(
    testingData, depth)
from sklearn import preprocessing as pp
from reservoir import Utility as util
from performance import ErrorMetrics as rmse
import numpy as np  # np.loadtxt is used below but numpy was never imported

# Read the Mackey-Glass (tau=17) time series from disk.
# Reshape to a column vector immediately: sklearn scalers require 2-D
# input of shape (n_samples, n_features); fitting on the 1-D array that
# np.loadtxt returns raises ValueError in modern scikit-learn.
data = np.loadtxt('MackeyGlass_t17.txt').reshape(-1, 1)

# Normalize the raw data into the range [-1, 1].
# (Fitted on the full series, matching the original behavior.)
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Keep only the first 5000 points.
data = data[:5000].reshape((5000, 1))

# Split the series: 40% training, 60% testing.
trainingData, testingData = util.splitData2(data, 0.4)
nTesting = testingData.shape[0]

# Form input/target feature-vector pairs from the training series.
inputTrainingData, outputTrainingData = util.formFeatureVectors(trainingData)

# Network hyper-parameters.
size = 256              # reservoir size
initialTransient = 50   # warm-up steps discarded before training

# NOTE(review): `topology` is not imported in this chunk — presumably
# imported elsewhere in the file; verify.

# Input-to-reservoir fully connected weight matrix.
inputWeight = topology.ClassicInputTopology(
    inputSize=inputTrainingData.shape[1],
    reservoirSize=size).generateWeightMatrix()

# Reservoir-to-reservoir fully connected weight matrix.
reservoirWeight = topology.ClassicReservoirTopology(
    size=size).generateWeightMatrix()
# Forecasting parameters.
depth = 30  # number of lagged values per feature vector

# Read the Mackey-Glass (tau=17) time series from disk.
# NOTE(review): `np`, `pp`, and `util` are not imported in this chunk —
# presumably imported earlier in the file; verify.
# Reshape to a column vector immediately: sklearn scalers require 2-D
# input of shape (n_samples, n_features); fitting on the 1-D array that
# np.loadtxt returns raises ValueError in modern scikit-learn.
data = np.loadtxt("MackeyGlass_t17.txt").reshape(-1, 1)

# Standardize the raw data (zero mean, unit variance).
# minMax = pp.MinMaxScaler((-1,1))
minMax = pp.StandardScaler()
data = minMax.fit_transform(data)

# Keep only the first 5000 points.
data = data[:5000].reshape((5000, 1))

# Split the series: 85% available for training, 15% for testing.
trainingData, testingData = util.splitData2(data, 0.85)
availableData = trainingData
nTesting = testingData.shape[0]

# Carve a validation set (40%) out of the training portion.
validationRatio = 0.4
trainingData, validationData = util.splitData2(trainingData,
                                               1.0 - validationRatio)

# Form (feature, target) vector pairs for each split.
# NOTE(review): `formFeatureTargetVectors` is not defined in this chunk —
# presumably defined/imported elsewhere in the file; verify.
trainingFeatureVectors, trainingTargetVectors = formFeatureTargetVectors(
    trainingData, depth)
validationFeatureVectors, validationTargetVectors = formFeatureTargetVectors(
    validationData, depth)
testingFeatureVectors, testingTargetVectors = formFeatureTargetVectors(
    testingData, depth)

# Network parameters.
in_out_neurons = 1     # single-channel input and output
hidden_neurons = 200   # hidden-layer width