from timeit import default_timer as time

startTime = time()

# Read the raw Mackey-Glass (tau=17) series from disk.
# One value per line, so np.loadtxt returns a 1-D array.
data = np.loadtxt("MackeyGlass_t17.txt")

# Normalize the raw data into [-1, 1].
# MinMaxScaler requires a 2-D (n_samples, n_features) array and raises
# ValueError on 1-D input, so reshape to a column vector before fitting.
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data.reshape(-1, 1))

# Keep only the first 6000 points as a (6000, 1) column vector.
data = data[:6000].reshape((6000, 1))

# Split the data into training (60%), validation (30%) and testing (10%).
trainingData, validationData, testingData = util.splitData(data, 0.6, 0.3, 0.1)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors (input/target pairs) for the training data.
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
# De-normalize the test targets so predictions are compared in original units.
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed: the last training sample starts the closed-loop validation run.
initialSeedForValidation = trainingData[-1]
# GA hyper-parameters for tuning reservoir connectivity.
networkSize = 500
populationSize = 10
noOfBest = populationSize // 2  # keep the top half each generation
noOfGenerations = 10
predictedOutputData, bestPopulation = utilityGA.tuneTrainPredictConnectivityGA(
    trainingInputData=trainingInputData,
from timeit import default_timer as time

startTime = time()

# Read the raw Mackey-Glass (tau=17) series from disk.
# One value per line, so np.loadtxt returns a 1-D array.
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1].
# MinMaxScaler requires a 2-D (n_samples, n_features) array and raises
# ValueError on 1-D input, so reshape to a column vector before fitting.
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data.reshape(-1, 1))

# Keep only the first 5000 points as a (5000, 1) column vector.
data = data[:5000].reshape((5000, 1))

# Split the data into training (40%), validation (40%) and testing (20%).
trainingData, validationData, testingData = util.splitData(data, 0.4, 0.4, 0.2)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors (input/target pairs) for the training data.
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
# De-normalize the test targets so predictions are compared in original units.
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed: the last training sample starts the closed-loop validation run.
initialSeedForValidation = trainingData[-1]

predictedOutputData = util.tuneTrainPredictConnectivityNonBrute(
    trainingInputData=trainingInputData,
    trainingOutputData=trainingOutputData,
    validationOutputData=validationData,
    initialInputSeedForValidation=initialSeedForValidation,
from timeit import default_timer as time

startTime = time()

# Read the raw Mackey-Glass (tau=17) series from disk.
# One value per line, so np.loadtxt returns a 1-D array.
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1].
# MinMaxScaler requires a 2-D (n_samples, n_features) array and raises
# ValueError on 1-D input, so reshape to a column vector before fitting.
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data.reshape(-1, 1))

# Keep only the first 6000 points as a (6000, 1) column vector.
data = data[:6000].reshape((6000, 1))

# Split the data into training (50%), validation (25%) and testing (25%).
trainingData, validationData, testingData = util.splitData(data, 0.5, 0.25, 0.25)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors (input/target pairs) for the training data.
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
# De-normalize the test targets so predictions are compared in original units.
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed: the last training sample starts the closed-loop validation run.
initialSeedForValidation = trainingData[-1]

predictedOutputData = utilGA.tuneTrainPredictGA(trainingInputData=trainingInputData,
                                            trainingOutputData=trainingOutputData,
                                            validationOutputData=validationData,
                                            initialInputSeedForValidation=initialSeedForValidation,
                                            testingData=actualOutputData
from timeit import default_timer as time

startTime = time()

# Read the raw Mackey-Glass (tau=17) series from disk.
# One value per line, so np.loadtxt returns a 1-D array.
data = np.loadtxt('MackeyGlass_t17.txt')

# Normalize the raw data into [-1, 1].
# MinMaxScaler requires a 2-D (n_samples, n_features) array and raises
# ValueError on 1-D input, so reshape to a column vector before fitting.
minMax = pp.MinMaxScaler((-1, 1))
data = minMax.fit_transform(data.reshape(-1, 1))

# Keep only the first 6000 points as a (6000, 1) column vector.
data = data[:6000].reshape((6000, 1))

# Split the data into training (60%), validation (30%) and testing (10%).
trainingData, validationData, testingData = util.splitData(data, 0.6, 0.3, 0.1)
nValidation = validationData.shape[0]
nTesting = testingData.shape[0]

# Form feature vectors (input/target pairs) for the training data.
trainingInputData, trainingOutputData = util.formFeatureVectors(trainingData)
# De-normalize the test targets so predictions are compared in original units.
actualOutputData = minMax.inverse_transform(testingData)[:, 0]

# Initial seed: the last training sample starts the closed-loop validation run.
initialSeedForValidation = trainingData[-1]
# GA hyper-parameters for tuning reservoir connectivity.
networkSize = 500
populationSize = 200
noOfBest = populationSize // 2  # keep the top half each generation
noOfGenerations = 100
predictedOutputData, bestPopulation = utilityGA.tuneTrainPredictConnectivityGA(
    trainingInputData=trainingInputData,