    )
    # if loadFile != "":
    #     net1.load_params_from(loadFile)
    net1.max_epochs = 50
    net1.update_learning_rate = ln
    return net1


generations, generationsToInputs, generationsToOutputs = dataParser.parse(fname="whole_population_0.txt")
iters = 150
saveFile = "LasagneWeights400_2Layer"
trainingInputs, trainingOutputs, testInputs, testOutputs = dataParser.makeSets(
    generationsToInputs, generationsToOutputs, generations[0:200], 1, 0.25)
ln = 0.01

# Z-score the inputs and outputs against their own column statistics.
X = Normalizers.gaussNormalize(trainingInputs)
Xtest = Normalizers.gaussNormalize(testInputs)
Y = Normalizers.gaussNormalize(trainingOutputs)
Ytest = Normalizers.gaussNormalize(testOutputs)

X = np.asarray(X, np.float32)
Y = np.asarray(Y, np.float32)
Xtest = np.asarray(Xtest, np.float32)
Ytest = np.asarray(Ytest, np.float32)

net = createNet(X, Y, ln, saveFile)
for n in range(iters):
    # Each fit() call runs the nolearn training loop for max_epochs epochs.
    net.fit(X, Y)
    y_pred = net.predict(Xtest)
    # De-normalize both targets and predictions before measuring error.
    errors = Normalizers.deGauss(trainingOutputs, Ytest) - Normalizers.deGauss(trainingOutputs, y_pred)
    print(np.mean(abs(errors), axis=0))
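
# Normalizers is an external helper module not shown in this file. Below is a
# minimal sketch of what gaussNormalize/deGauss plausibly implement, assuming
# per-column z-scoring against the reference data's mean and standard
# deviation; this is hypothetical illustration code, not the original module.
import numpy as np

def gaussNormalize(data):
    # Z-score each column: (x - mean) / std.
    data = np.asarray(data, dtype=np.float64)
    return (data - data.mean(axis=0)) / data.std(axis=0)

def deGauss(reference, normalized):
    # Invert gaussNormalize using the reference set's column statistics,
    # matching the deGauss(trainingOutputs, y_pred) call pattern above.
    reference = np.asarray(reference, dtype=np.float64)
    return np.asarray(normalized) * reference.std(axis=0) + reference.mean(axis=0)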
xTraining, yTraining, xTest, yTest = makeSets(X, Y, trainingPercent)
xTraining = array(xTraining).astype(float32)
yTraining = array(yTraining).astype(float32)
xTest = array(xTest).astype(float32)
yTest = array(yTest).astype(float32)

numInputs = 1
numOutputs = 1

# Min-max scale the inputs; the targets are left unnormalized here.
X = Normalizers.minMaxNormalize(xTraining)
Xtest = Normalizers.minMaxNormalize(xTest)
# Y = Normalizers.gaussNormalize(trainingOutputs)
# Ytest = Normalizers.gaussNormalize(testOutputs)
#
# X = trainingInputs
# Xtest = testInputs
Y = yTraining
Ytest = yTest

hiddenSize = 40
# Legacy Keras 0.x layer signature: Dense(input_dim, output_dim, init=...).
model = Sequential()
model.add(Dense(numInputs, hiddenSize, init='lecun_uniform'))
model.add(Activation('sigmoid'))
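
# The Sequential model above ends after the first hidden layer. Below is a
# minimal sketch of how such a model would typically be finished and trained,
# assuming the same legacy Keras 0.x API (Dense(input_dim, output_dim),
# nb_epoch) and a mean-squared-error regression objective; the output layer,
# optimizer, and epoch/batch values are illustrative assumptions, not taken
# from the original script.
from keras.optimizers import SGD

model.add(Dense(hiddenSize, numOutputs, init='lecun_uniform'))  # linear output layer
model.compile(loss='mean_squared_error', optimizer=SGD(lr=0.01))
model.fit(X, Y, nb_epoch=100, batch_size=32, verbose=1)
test_loss = model.evaluate(Xtest, Ytest, batch_size=32)
print(test_loss)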