    def __reservoirTrain__(self, x):

        #Extract the parameters
        spectralRadius = x[0]
        inputScaling = x[1]
        reservoirScaling = x[2]
        leakingRate = x[3]

        #Create the reservoir
        res = classicESN.Reservoir(size=self.size,
                                   spectralRadius=spectralRadius,
                                   inputScaling=inputScaling,
                                   reservoirScaling=reservoirScaling,
                                   leakingRate=leakingRate,
                                   initialTransient=self.initialTransient,
                                   inputData=self.trainingInputData,
                                   outputData=self.trainingOutputData,
                                   inputWeightRandom=self.inputWeightRandom,
                                   reservoirWeightRandom=self.reservoirWeightRandom)

        #Train the reservoir
        res.trainReservoir()

        #Predict for the validation data
        predictedOutputData = util.predictFuture(res, self.initialSeed, self.horizon)

        #Calculate the regression error
        errorFunction = rmse.MeanSquareError()
        regressionError = errorFunction.compute(self.validationOutputData, predictedOutputData)

        #Return the error
        print("\nThe Parameters: "+str(x)+" Regression error:"+str(regressionError))
        return regressionError
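# Hedged usage sketch (not in the original source): the objective above maps a
# 4-vector [spectralRadius, inputScaling, reservoirScaling, leakingRate] to a
# validation error, so it can be handed directly to SciPy's differential
# evolution, the minimizer named elsewhere in this listing. `tunerInstance` is
# a hypothetical, already-constructed instance of the surrounding tuner class,
# and the (0, 1) bounds mirror the bounds used in the tuning scripts below.
from scipy.optimize import differential_evolution

bounds = [(0.0, 1.0)] * 4  # spectralRadius, inputScaling, reservoirScaling, leakingRate
result = differential_evolution(tunerInstance.__reservoirTrain__, bounds)
spectralRadiusOpt, inputScalingOpt, reservoirScalingOpt, leakingRateOpt = result.x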
    def __reservoirTrain__(self, x):

        #Extract the parameters
        meanDegree, beta = x
        meanDegree = int(meanDegree)

        # To smooth out the randomness in assigning weights, run `times` trials and take the average error
        times = 100
        cumulativeError = 0

        for i in range(times):
            # Input and weight connectivity Matrix
            inputWeightMatrix = topology.ClassicInputTopology(
                self.inputD, self.size).generateWeightMatrix()
            reservoirWeightMatrix = topology.SmallWorldGraphs(
                size=self.size, meanDegree=meanDegree,
                beta=beta).generateWeightMatrix()

            #Create the reservoir
            res = classicESN.Reservoir(
                size=self.size,
                spectralRadius=self.spectralRadius,
                inputScaling=self.inputScaling,
                reservoirScaling=self.reservoirScaling,
                leakingRate=self.leakingRate,
                initialTransient=self.initialTransient,
                inputData=self.trainingInputData,
                outputData=self.trainingOutputData,
                inputWeightRandom=inputWeightMatrix,
                reservoirWeightRandom=reservoirWeightMatrix)

            #Train the reservoir
            res.trainReservoir()

            # Warm up
            predictedTrainingOutputData = res.predict(
                self.trainingInputData[-self.initialTransient:])

            #Predict for the validation data
            predictedOutputData = util.predictFuture(res, self.initialSeed,
                                                     self.horizon)

            gc.collect()

            #Calculate the regression error
            errorFunction = metrics.MeanSquareError()
            error = errorFunction.compute(self.validationOutputData,
                                          predictedOutputData)
            cumulativeError += error

        regressionError = cumulativeError / times

        #Return the error
        print("\nThe Parameters: " + str(x) + " Regression error:" +
              str(regressionError))
        return regressionError
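# Hedged sketch (an assumption, not the project's actual topology module): the
# SmallWorldGraphs(size, meanDegree, beta) parameters line up with the
# Watts-Strogatz model, where meanDegree is the ring degree k and beta the
# rewiring probability. One plausible way to build such a reservoir matrix:
import networkx as nx
import numpy as np

def smallWorldWeightMatrix(size, meanDegree, beta, seed=None):
    # Ring of `size` nodes, each joined to `meanDegree` neighbours,
    # with every edge rewired with probability `beta`
    graph = nx.watts_strogatz_graph(size, meanDegree, beta, seed=seed)
    adjacency = nx.to_numpy_array(graph)
    # Random weights on the existing connections, zero elsewhere
    rng = np.random.default_rng(seed)
    weights = rng.uniform(-0.5, 0.5, (size, size))
    return adjacency * weights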
    def evaluate(self, x):

        # Extract the parameters
        attachment = int(x[0, 0])

        # To smooth out the randomness in assigning weights, run `times` trials and take the average error
        times = 1
        cumulativeError = 0

        for i in range(times):
            # Input and weight connectivity Matrix
            inputWeightMatrix = topology.ClassicInputTopology(
                self.inputD, self.size).generateWeightMatrix()
            network = topology.ScaleFreeNetworks(size=self.size,
                                                 attachmentCount=attachment)
            reservoirWeightMatrix = network.generateWeightMatrix()

            # Create the reservoir
            res = esn.Reservoir(size=self.size,
                                spectralRadius=self.spectralRadius,
                                inputScaling=self.inputScaling,
                                reservoirScaling=self.reservoirScaling,
                                leakingRate=self.leakingRate,
                                initialTransient=self.initialTransient,
                                inputData=self.trainingInputData,
                                outputData=self.trainingOutputData,
                                inputWeightRandom=inputWeightMatrix,
                                reservoirWeightRandom=reservoirWeightMatrix)

            # Train the reservoir
            res.trainReservoir()

            # Warm up
            predictedTrainingOutputData = res.predict(
                self.trainingInputData[-self.initialTransient:])

            # Predict for the validation data
            predictedOutputData = util.predictFuture(res, self.initialSeed,
                                                     self.horizon)

            gc.collect()

            # Calculate the regression error
            errorFunction = metrics.MeanSquareError()
            error = errorFunction.compute(self.validationOutputData,
                                          predictedOutputData)
            cumulativeError += error

        regressionError = cumulativeError / times

        # Return the error
        #print("Attachment: "+str(attachment) + "Error: "+str(regressionError))
        return regressionError
    def evaluate(self, x):

        # Extract the parameters
        meanDegree = int(x[0, 0])
        beta = x[0, 1]

        # To smooth out the randomness in assigning weights, run `times` trials and take the average error
        times = 1
        cumulativeError = 0

        for i in range(times):
            # Input and weight connectivity Matrix
            inputWeightMatrix = topology.ClassicInputTopology(self.inputD, self.size).generateWeightMatrix()
            network = topology.SmallWorldGraphs(size=self.size, meanDegree=meanDegree, beta=beta)
            reservoirWeightMatrix = network.generateWeightMatrix()

            # Create the reservoir
            res = esn.Reservoir(size=self.size,
                                spectralRadius=self.spectralRadius,
                                inputScaling=self.inputScaling,
                                reservoirScaling=self.reservoirScaling,
                                leakingRate=self.leakingRate,
                                initialTransient=self.initialTransient,
                                inputData=self.trainingInputData,
                                outputData=self.trainingOutputData,
                                inputWeightRandom=inputWeightMatrix,
                                reservoirWeightRandom=reservoirWeightMatrix)

            # Train the reservoir
            res.trainReservoir()

            # Warm up
            predictedTrainingOutputData = res.predict(self.trainingInputData[-self.initialTransient:])

            # Predict for the validation data
            predictedOutputData = util.predictFuture(res, self.initialSeed, self.horizon)

            gc.collect()

            # Calculate the regression error
            errorFunction = metrics.MeanSquareError()
            error = errorFunction.compute(self.validationOutputData, predictedOutputData)
            cumulativeError += error

        regressionError = cumulativeError/times

        # Return the error
        #print("SMG parameters: "+str(x) + "Error: "+str(regressionError))
        return regressionError
    def __reservoirTrain__(self, x):

        #Extract the parameters
        attachment = int(x)

        # To smooth out the randomness in assigning weights, run `times` trials and take the average error
        times = 100
        cumulativeError = 0

        for i in range(times):
            # Input and weight connectivity Matrix
            inputWeightMatrix = topology.ClassicInputTopology(self.inputD, self.size).generateWeightMatrix()
            reservoirWeightMatrix = topology.ScaleFreeNetworks(size=self.size, attachmentCount=attachment).generateWeightMatrix()

            #Create the reservoir
            res = classicESN.Reservoir(size=self.size,
                                       spectralRadius=self.spectralRadius,
                                       inputScaling=self.inputScaling,
                                       reservoirScaling=self.reservoirScaling,
                                       leakingRate=self.leakingRate,
                                       initialTransient=self.initialTransient,
                                       inputData=self.trainingInputData,
                                       outputData=self.trainingOutputData,
                                       inputWeightRandom=inputWeightMatrix,
                                       reservoirWeightRandom=reservoirWeightMatrix)

            #Train the reservoir
            res.trainReservoir()

            # Warm up
            predictedTrainingOutputData = res.predict(self.trainingInputData[-self.initialTransient:])

            #Predict for the validation data
            predictedOutputData = util.predictFuture(res, self.initialSeed, self.horizon)

            gc.collect()

            #Calculate the regression error
            errorFunction = metrics.MeanSquareError()
            error = errorFunction.compute(self.validationOutputData, predictedOutputData)
            cumulativeError += error

        regressionError = cumulativeError/times

        #Return the error
        print("\nThe Parameters: "+str(x)+" Regression error:"+str(regressionError))
        return regressionError
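# Hedged sketch (an assumption about the topology module): ScaleFreeNetworks(
# size, attachmentCount) is consistent with the Barabasi-Albert
# preferential-attachment model, where attachmentCount is the number of edges
# each newly added node attaches with. A plausible weight-matrix construction:
import networkx as nx
import numpy as np

def scaleFreeWeightMatrix(size, attachmentCount, seed=None):
    # Scale-free graph via preferential attachment
    graph = nx.barabasi_albert_graph(size, attachmentCount, seed=seed)
    adjacency = nx.to_numpy_array(graph)
    # Random weights on the existing connections, zero elsewhere
    rng = np.random.default_rng(seed)
    weights = rng.uniform(-0.5, 0.5, (size, size))
    return adjacency * weights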
    def __reservoirTrain__(self, x):

        #Extract the parameters
        spectralRadius = x[0]
        inputScaling = x[1]
        reservoirScaling = x[2]
        leakingRate = x[3]

        #Create the reservoir
        res = classicESN.Reservoir(
            size=self.size,
            spectralRadius=spectralRadius,
            inputScaling=inputScaling,
            reservoirScaling=reservoirScaling,
            leakingRate=leakingRate,
            initialTransient=self.initialTransient,
            inputData=self.trainingInputData,
            outputData=self.trainingOutputData,
            inputWeightRandom=self.inputWeightRandom,
            reservoirWeightRandom=self.reservoirWeightRandom)

        #Train the reservoir
        res.trainReservoir()

        # Warm up
        predictedTrainingOutputData = res.predict(
            self.trainingInputData[-self.initialTransient:])

        #Predict for the validation data
        predictedOutputData = util.predictFuture(res, self.initialSeed,
                                                 self.horizon)

        gc.collect()

        #Calculate the regression error
        errorFunction = metrics.MeanSquareError()
        regressionError = errorFunction.compute(self.validationOutputData,
                                                predictedOutputData)

        #Return the error
        print("\nThe Parameters: " + str(x) + " Regression error:" +
              str(regressionError))
        return regressionError
    def evaluate(self, x):

        #Extract the parameters
        spectralRadius = x[0, 0]
        inputScaling = x[0, 1]
        reservoirScaling = x[0, 2]
        leakingRate = x[0, 3]

        #Create the reservoir
        res = esn.Reservoir(size=self.size,
                            spectralRadius=spectralRadius,
                            inputScaling=inputScaling,
                            reservoirScaling=reservoirScaling,
                            leakingRate=leakingRate,
                            initialTransient=self.initialTransient,
                            inputData=self.trainingInputData,
                            outputData=self.trainingOutputData,
                            inputWeightRandom=self.inputWeightRandom,
                            reservoirWeightRandom=self.reservoirWeightRandom)

        #Train the reservoir
        res.trainReservoir()

        # Warm up
        predictedTrainingOutputData = res.predict(self.trainingInputData[-self.initialTransient:])

        #Predict for the validation data
        predictedOutputData = util.predictFuture(res, self.initialSeed, self.horizon)

        gc.collect()

        #Calculate the regression error
        errorFunction = metrics.MeanSquareError()
        regressionError = errorFunction.compute(self.validationOutputData, predictedOutputData)

        #Return the error
        return regressionError
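# Hedged sketch (not in the original source): the x[0, j] indexing above
# implies the optimizer hands candidates over as 2-D row vectors, as
# Bayesian-optimization libraries typically do. A thin adapter lets the same
# objective serve a 1-D minimizer as well; `tunerInstance` is hypothetical.
import numpy as np

def evaluateFlat(params, tunerInstance):
    # Reshape a flat parameter vector into the (1, 4) row the objective expects
    return tunerInstance.evaluate(np.asarray(params).reshape(1, -1))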
from timeit import default_timer as time

startTime = time()

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

#Get only 6000 points
data = data[:6000].reshape((6000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(
    data, 0.5, 0.25, 0.25)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed
initialSeedForValidation = trainingData[-1]

predictedOutputData = utilGA.tuneTrainPredictGA(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
from timeit import default_timer as time

startTime = time()

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1,1))
data = minMax.fit_transform(data)

#Get only 6000 points
data = data[:6000].reshape((6000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(data, 0.5, 0.25, 0.25)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
#actualOutputData = minMax.inverse_transform(np.vstack((validationData[:nValidation],testingData[:nTesting])))[:,0]
actualOutputData = minMax.inverse_transform(testingData)[:,0]

# Initial seed
initialSeedForValidation = trainingData[-1]

predictedOutputData, error = util.tuneTrainPredict(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
from timeit import default_timer as time

startTime = time()

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1,1))
data = minMax.fit_transform(data)

#Get only 5000 points
data = data[:5000].reshape((5000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
actualOutputData = minMax.inverse_transform(testingData)[:,0]

# Initial seed
initialSeedForValidation = trainingData[-1]

predictedOutputData = util.tuneTrainPredictConnectivity(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
    horizon=nTesting,
from sklearn import preprocessing as pp
from reservoir import Utility as util
from performance import ErrorMetrics as rmse

# Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1,1))
data = minMax.fit_transform(data)

#Get only 5000 points
data = data[:5000].reshape((5000, 1))

# Number of points - 5000
trainingData, testingData = util.splitData2(data, 0.4)
nTesting = testingData.shape[0]

# Form feature vectors
inputTrainingData, outputTrainingData = util.formFeatureVectors(trainingData)

# Tune the network
size = 256
initialTransient = 50

# Input-to-reservoir fully connected
inputWeight = topology.ClassicInputTopology(inputSize=inputTrainingData.shape[1], reservoirSize=size).generateWeightMatrix()

# Reservoir-to-reservoir fully connected
reservoirWeight = topology.ClassicReservoirTopology(size=size).generateWeightMatrix()
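# Hedged sketch (an assumption; ClassicReservoirTopology's internals are not
# shown in this listing): a "classic" fully connected reservoir amounts to a
# dense random weight matrix, with spectral-radius scaling applied later
# inside the Reservoir itself.
import numpy as np

def classicReservoirWeights(size, seed=None):
    # Every unit connects to every unit with a random weight
    rng = np.random.default_rng(seed)
    return rng.uniform(-0.5, 0.5, (size, size))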
os.mkdir(outputFolderName)

startTime = time()

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

#Get only 6000 points
data = data[:6000].reshape((6000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(
    data, 0.5, 0.25, 0.25)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed
initialSeedForValidation = trainingData[-1]

# Error function
errorFun = metrics.MeanSquareError()

# Number of iterations
iterations = 1
# Forecasting parameters
depth = 30

# Read data from the file
data = np.loadtxt("MackeyGlass_t17.txt")

# Normalize the raw data
# minMax = pp.MinMaxScaler((-1,1))
minMax = pp.StandardScaler()
data = minMax.fit_transform(data)

# Get only 5000 points
data = data[:5000].reshape((5000, 1))

# Number of points - 5000
trainingData, testingData = util.splitData2(data, 0.85)
availableData = trainingData
nTesting = testingData.shape[0]

# Divide the training data into training and validation
validationRatio = 0.4
trainingData, validationData = util.splitData2(trainingData, 1.0 - validationRatio)

# Form feature vectors
trainingFeatureVectors, trainingTargetVectors = formFeatureTargetVectors(trainingData, depth)
validationFeatureVectors, validationTargetVectors = formFeatureTargetVectors(validationData, depth)
testingFeatureVectors, testingTargetVectors = formFeatureTargetVectors(testingData, depth)

# Network parameters
in_out_neurons = 1
hidden_neurons = 200
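# Hedged sketch (an assumption): formFeatureTargetVectors is not shown in this
# listing. A standard sliding-window construction consistent with how it is
# used above (a series in, depth-long feature rows and one-step targets out):
import numpy as np

def formFeatureTargetVectors(series, depth):
    # series has shape (n, 1); each feature row is the last `depth`
    # observations, each target the observation that follows them
    features, targets = [], []
    for t in range(len(series) - depth):
        features.append(series[t:t + depth, 0])
        targets.append(series[t + depth, 0])
    return np.array(features), np.array(targets).reshape(-1, 1)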
results = open(outputFolderName + "/OptimalParameters.res", 'w')

startTime = time()

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

#Get only 6000 points
data = data[:6000].reshape((6000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(
    data, 0.5, 0.25, 0.25)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed
initialSeedForValidation = trainingData[-1]

# Error function
error = metrics.MeanSquareError()

# Number of iterations
iterations = 1
from timeit import default_timer as time

startTime = time()

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1,1))
data = minMax.fit_transform(data)

#Get only 5000 points
data = data[:5000].reshape((5000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
actualOutputData = minMax.inverse_transform(testingData)[:,0]

# Initial seed
initialSeedForValidation = trainingData[-1]

predictedOutputData = util.tuneTrainPredictConnectivityNonBrute(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
    horizon=nTesting,
def tuneTrainPredict(trainingInputData,
                     trainingOutputData,
                     validationOutputData,
                     initialInputSeedForValidation,
                     testingData,
                     size=256,
                     initialTransient=50,
                     spectralRadiusBound=(0.0, 1.0),
                     inputScalingBound=(0.0, 1.0),
                     reservoirScalingBound=(0.0, 1.0),
                     leakingRateBound=(0.0, 1.0),
                     reservoirTopology=None):

    # Generate the input and reservoir weight matrices based on the reservoir topology
    inputWeightMatrix = topology.ClassicInputTopology(
        inputSize=trainingInputData.shape[1],
        reservoirSize=size).generateWeightMatrix()
    if reservoirTopology is None:
        reservoirWeightMatrix = topology.ClassicReservoirTopology(
            size=size).generateWeightMatrix()
    else:  #TODO - think about matrix multiplication
        reservoirWeightMatrix = reservoirTopology.generateWeightMatrix()

    resTuner = tuner.ReservoirParameterTuner(
        size=size,
        initialTransient=initialTransient,
        trainingInputData=trainingInputData,
        trainingOutputData=trainingOutputData,
        initialSeed=initialInputSeedForValidation,
        validationOutputData=validationOutputData,
        spectralRadiusBound=spectralRadiusBound,
        inputScalingBound=inputScalingBound,
        reservoirScalingBound=reservoirScalingBound,
        leakingRateBound=leakingRateBound,
        inputWeightMatrix=inputWeightMatrix,
        reservoirWeightMatrix=reservoirWeightMatrix,
        minimizer=tuner.Minimizer.DifferentialEvolution)
    spectralRadiusOptimum, inputScalingOptimum, reservoirScalingOptimum, leakingRateOptimum = resTuner.getOptimalParameters()

    #Train
    network = ESN.Reservoir(size=size,
                            spectralRadius=spectralRadiusOptimum,
                            inputScaling=inputScalingOptimum,
                            reservoirScaling=reservoirScalingOptimum,
                            leakingRate=leakingRateOptimum,
                            initialTransient=initialTransient,
                            inputData=trainingInputData,
                            outputData=trainingOutputData,
                            inputWeightRandom=inputWeightMatrix,
                            reservoirWeightRandom=reservoirWeightMatrix)
    network.trainReservoir()

    warmupFeatureVectors, warmTargetVectors = formFeatureVectors(
        validationOutputData)
    predictedWarmup = network.predict(warmupFeatureVectors[-initialTransient:])

    initialInputSeedForTesting = validationOutputData[-1]

    predictedOutputData = predictFuture(network, initialInputSeedForTesting,
                                        testingData.shape[0])[:, 0]

    cumError = 0
    times = 100
    for i in range(times):
        # Run many times and take the average regression error
        regressionError = util.trainAndGetError(
            size=size,
            spectralRadius=spectralRadiusOptimum,
            inputScaling=inputScalingOptimum,
            reservoirScaling=reservoirScalingOptimum,
            leakingRate=leakingRateOptimum,
            initialTransient=initialTransient,
            trainingInputData=trainingInputData,
            trainingOutputData=trainingOutputData,
            inputWeightMatrix=inputWeightMatrix,
            reservoirWeightMatrix=reservoirWeightMatrix,
            validationOutputData=validationOutputData,
            horizon=testingData.shape[0],
            testingActualOutputData=testingData)
        cumError += regressionError

    error = cumError / times
    return predictedOutputData, error
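# Usage sketch, mirroring how tuneTrainPredict is invoked elsewhere in this
# listing: the data splits and feature vectors are assumed to have been
# prepared as in the Mackey-Glass scripts above.
predictedOutputData, error = tuneTrainPredict(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
    testingData=testingData)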
from sklearn import preprocessing as pp
import numpy as np
from reservoir import Utility as util

#Read data from the file
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data
minMax = pp.MinMaxScaler((-1,1))
data = minMax.fit_transform(data)

#Get only 5000 points
data = data[:5000].reshape((5000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
validationInputData, validationOutputData = util.formFeatureVectors(validationData)

spectralRadiusBound = (0.0, 1.00)
inputScalingBound = (0.0, 1.0)
reservoirScalingBound = (0.0, 1.0)
leakingRateBound = (0.0, 1.0)
size = 256
initialTransient = 50
resTuner = tuner.ReservoirTuner(size=size,
                                initialTransient=initialTransient,
from timeit import default_timer as time

startTime = time()

# Read data from the file
data = np.loadtxt("MackeyGlass_t17.txt")

# Normalize the raw data
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data)

# Get only 6000 points
data = data[:6000].reshape((6000, 1))

# Split the data into training, validation and testing
trainingData, validationData, testingData = util.splitData(data, 0.6, 0.3, 0.1)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors for training data
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed
initialSeedForValidation = trainingData[-1]
networkSize = 500
populationSize = 10
noOfBest = int(populationSize / 2)
noOfGenerations = 10
predictedOutputData, bestPopulation = utilityGA.tuneTrainPredictConnectivityGA(
    trainingInputData=trainingInputData,