# Classic fully-connected ESN experiment on the Mackey-Glass (tau=17) series.
# Reconstructed from a whitespace-mangled single-line paste: the lost newlines
# had turned everything after the first '#' into dead comment text.

# Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1]
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 5000 points (the original comment said 6000, but the slice takes 5000)
data = data[:5000].reshape((5000, 1))

# Split into training and testing data
# (0.4 is the split parameter — exact semantics defined by util.splitData2)
trainingData, testingData = util.splitData2(data, 0.4)
nTesting = testingData.shape[0]

# Form feature vectors
inputTrainingData, outputTrainingData = util.formFeatureVectors(trainingData)

# Tune the network
size = 256
initialTransient = 50

# Input-to-reservoir fully connected
inputWeight = topology.ClassicInputTopology(
    inputSize=inputTrainingData.shape[1],
    reservoirSize=size).generateWeightMatrix()

# Reservoir-to-reservoir fully connected
reservoirWeight = topology.ClassicReservoirTopology(
    size=size).generateWeightMatrix()

# NOTE(review): the source is truncated here — the ESN.Reservoir(...) call is
# missing its remaining arguments and closing parenthesis. Kept commented out
# so the file stays syntactically valid; restore once the continuation is known:
# res = ESN.Reservoir(size=size, ...
# GA-based connectivity tuning experiment on the Mackey-Glass (tau=17) series.
# Reconstructed from a whitespace-mangled single-line paste: the lost newlines
# had turned everything after the first '#' into dead comment text.

# Read data from the file
data = np.loadtxt("MackeyGlass_t17.txt")

# Normalize the raw data into [-1, 1]
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 6000 points
data = data[:6000].reshape((6000, 1))

# Split the data into training, validation and testing (60/30/10)
trainingData, validationData, testingData = util.splitData(data, 0.6, 0.3, 0.1)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)

# De-normalized ground truth for the test horizon
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed: last training point starts the free-running validation forecast
initialSeedForValidation = trainingData[-1]

# GA hyper-parameters
networkSize = 500
populationSize = 10
noOfBest = int(populationSize / 2)  # keep the better half of each generation
noOfGenerations = 10

# NOTE(review): the source is truncated here — the GA call below is missing its
# remaining keyword arguments and closing parenthesis. Kept commented out so the
# file stays syntactically valid; restore once the continuation is known:
# predictedOutputData, bestPopulation = utilityGA.tuneTrainPredictConnectivityGA(
#     trainingInputData=trainingInputData,
#     trainingOutputData=trainingOutputData,
#     validationOutputData=validationData,
#     initialInputSeedForValidation=initialSeedForValidation,
#     horizon=nTesting,
#     noOfBest=noOfBest, ...
# Reservoir hyper-parameter tuning experiment on the Mackey-Glass (tau=17)
# series. Reconstructed from a whitespace-mangled single-line paste: the lost
# newlines had turned everything after the first '#' into dead comment text.

# Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1]
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 5000 points (the original comment said 4000, but the slice takes 5000)
data = data[:5000].reshape((5000, 1))

# Split the data into training, validation and testing (40/40/20)
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training and validation data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
validationInputData, validationOutputData = util.formFeatureVectors(validationData)

# Search bounds for the tuner
spectralRadiusBound = (0.0, 1.00)
inputScalingBound = (0.0, 1.0)
reservoirScalingBound = (0.0, 1.0)
leakingRateBound = (0.0, 1.0)

size = 256
initialTransient = 50

# NOTE(review): the source is truncated here — the ReservoirTuner(...) call is
# missing its remaining keyword arguments and closing parenthesis. Kept
# commented out so the file stays syntactically valid; restore once the
# continuation is known:
# resTuner = tuner.ReservoirTuner(
#     size=size,
#     initialTransient=initialTransient,
#     trainingInputData=trainingInputData,
#     trainingOutputData=trainingOutputData,
#     validationInputData=validationInputData,
#     validationOutputData=validationOutputData,
#     spectralRadiusBound=spectralRadiusBound, ...
# Small-world-topology reservoir experiment (non-brute-force tuning) on the
# Mackey-Glass (tau=17) series. Reconstructed from a whitespace-mangled
# single-line paste: the lost newlines had turned everything after the first
# '#' into dead comment text. This fragment is complete end-to-end.

# Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1]
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 5000 points
data = data[:5000].reshape((5000, 1))

# Split the data into training, validation and testing (40/40/20)
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)

# De-normalized ground truth for the test horizon
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed: last training point starts the free-running validation forecast
initialSeedForValidation = trainingData[-1]

# Tune, train and predict with a small-world reservoir topology
# (note: the raw validation split is passed as validationOutputData — the
# helper presumably forms its own feature vectors; verify against util)
predictedOutputData = util.tuneTrainPredictConnectivityNonBrute(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
    horizon=nTesting,
    resTopology=util.Topology.SmallWorldGraphs)

# Map predictions back to the original data scale
predictedOutputData = minMax.inverse_transform(predictedOutputData)
# Classic fully-connected ESN experiment on the Mackey-Glass (tau=17) series
# (variant with fixed spectral radius). Reconstructed from a whitespace-mangled
# single-line paste: the lost newlines had turned everything after the first
# '#' into dead comment text.

# Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1]
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 5000 points (the original comment said 6000, but the slice takes 5000)
data = data[:5000].reshape((5000, 1))

# Split into training and testing data
# (0.4 is the split parameter — exact semantics defined by util.splitData2)
trainingData, testingData = util.splitData2(data, 0.4)
nTesting = testingData.shape[0]

# Form feature vectors
inputTrainingData, outputTrainingData = util.formFeatureVectors(trainingData)

# Tune the network
size = 256
initialTransient = 50

# Input-to-reservoir fully connected
inputWeight = topology.ClassicInputTopology(
    inputSize=inputTrainingData.shape[1],
    reservoirSize=size).generateWeightMatrix()

# Reservoir-to-reservoir fully connected
reservoirWeight = topology.ClassicReservoirTopology(
    size=size).generateWeightMatrix()

# NOTE(review): the source is truncated here — the ESN.Reservoir(...) call is
# missing its remaining arguments and closing parenthesis. Kept commented out
# so the file stays syntactically valid; restore once the continuation is known:
# res = ESN.Reservoir(
#     size=size,
#     inputData=inputTrainingData,
#     outputData=outputTrainingData,
#     spectralRadius=0.79, ...
# Reservoir hyper-parameter tuning experiment on the Mackey-Glass (tau=17)
# series (second variant). Reconstructed from a whitespace-mangled single-line
# paste: the lost newlines had turned everything after the first '#' into dead
# comment text.

# Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1]
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 5000 points (the original comment said 4000, but the slice takes 5000)
data = data[:5000].reshape((5000, 1))

# Split the data into training, validation and testing (40/40/20)
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training and validation data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
validationInputData, validationOutputData = util.formFeatureVectors(validationData)

# Search bounds for the tuner
spectralRadiusBound = (0.0, 1.00)
inputScalingBound = (0.0, 1.0)
reservoirScalingBound = (0.0, 1.0)
leakingRateBound = (0.0, 1.0)

size = 256
initialTransient = 50

# NOTE(review): the source is truncated here — the ReservoirTuner(...) call is
# missing its remaining keyword arguments and closing parenthesis. Kept
# commented out so the file stays syntactically valid; restore once the
# continuation is known:
# resTuner = tuner.ReservoirTuner(
#     size=size,
#     initialTransient=initialTransient,
#     trainingInputData=trainingInputData,
#     trainingOutputData=trainingOutputData,
#     validationInputData=validationInputData,
#     validationOutputData=validationOutputData, ...