Example #1
def getData(self):
    if self.data is not None:
        return self.data
    if not os.path.isfile(self.datapoints_out):
        self.data = toPybrainData(
            self.T, self.R, self.P,
            self.datapoints_in, self.datapoints_out,
            small=('small' in self.datapoints_out))
        return self.data
    print 'loading', self.datapoints_out
    self.data = SupervisedDataSet.loadFromFile(self.datapoints_out)
    return self.data
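toPybrainData is project-specific and not shown; the cached file it produces is simply a dataset serialized with PyBrain's saveToFile, the counterpart of loadFromFile. A minimal sketch of the save side of this cache pattern (the field sizes and samples below are placeholders, not taken from the project):

from pybrain.datasets import SupervisedDataSet

def buildAndCacheData(datapoints_out):
    # build a small dataset and serialize it so loadFromFile can pick it up later
    ds = SupervisedDataSet(2, 1)  # 2 inputs, 1 target: placeholder dimensions
    ds.addSample((0.0, 1.0), (1.0,))
    ds.addSample((1.0, 0.0), (1.0,))
    ds.saveToFile(datapoints_out)
    return ds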
Example #2
File: motko.py Project: Tiima/Motko
def trainfromfileds(self,
                    loops,
                    trainUntilConvergence=False,
                    smallerTS=False):
    if smallerTS:
        filename = "Basic_Test_TrainingSet_{0}.ds".format(
            self.motkolive.colornumber)
    else:
        filename = "Basic_TrainingSet_{0}.ds".format(
            self.motkolive.colornumber)
    if not os.path.isfile(os.path.join(self.cwd, filename)):
        self.motkolive.CreateTrainingset(
            self.motkolive.colornumber, smallerTS=smallerTS)
    self.motkolive.trainfromfileds(
        SupervisedDataSet.loadFromFile(filename), loops,
        trainUntilConvergence)
Example #3
def testNets():
    ds = SupervisedDataSet.loadFromFile('SynapsemonPie/boards')
    net20 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer20.xml') 
    net50 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer50.xml') 
    net80 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer80.xml') 
    net110 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer110.xml') 
    net140 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer140.xml') 
    trainer20 = BackpropTrainer(net20, ds)
    trainer50 = BackpropTrainer(net50, ds)
    trainer80 = BackpropTrainer(net80, ds)
    trainer110 = BackpropTrainer(net110, ds)
    trainer140 = BackpropTrainer(net140, ds)
    print trainer20.train()
    print trainer50.train()
    print trainer80.train()
    print trainer110.train()
    print trainer140.train()
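The five blocks differ only in the file suffix, so the same test could be written as a loop; a sketch using the same files and calls as above:

def testNets():
    ds = SupervisedDataSet.loadFromFile('SynapsemonPie/boards')
    for size in (20, 50, 80, 110, 140):
        # load each saved network and report one epoch of training error
        net = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer%d.xml' % size)
        print size, BackpropTrainer(net, ds).train()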
Example #4
def testNets():
    ds = SupervisedDataSet.loadFromFile('SynapsemonPie/boards')
    net20 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer20.xml')
    net50 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer50.xml')
    net80 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer80.xml')
    net110 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer110.xml')
    net140 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer140.xml')
    trainer20 = BackpropTrainer(net20, ds)
    trainer50 = BackpropTrainer(net50, ds)
    trainer80 = BackpropTrainer(net80, ds)
    trainer110 = BackpropTrainer(net110, ds)
    trainer140 = BackpropTrainer(net140, ds)
    print trainer20.train()
    print trainer50.train()
    print trainer80.train()
    print trainer110.train()
    print trainer140.train()
Example #5
def run(layers, show, epochs):
	# load data from storage
	print("Loading Data from storage...")
	DS = SupervisedDataSet.loadFromFile("Data/DSSuperNorm")
	TrainDS, TestDS = DS.splitWithProportion(0.7)

	for _, target in TrainDS:
		for x in range(8):
			if target[x] == 1:
				target[x] = .9
			else:
				target[x] = .1

	for _, target in TestDS:
		for x in range(8):
			if target[x] == 1:
				target[x] = .9
			else:
				target[x] = .1

	# build a feed-forward network with the given layer sizes;
	# the input ranges are taken from the data via nl.tool.minmax
	print("Setting up NN...")
	net = nl.net.newff(nl.tool.minmax(TestDS['input']), layers)

	net.layers[-1].transf = nl.trans.SoftMax()


	# train the NN
	print("Training NN...")
	err = net.train(TestDS['input'], TestDS['target'], show=show, epochs=epochs, goal=0.000000000001)


	ary = net.sim(TrainDS['input'])


	# Display the miss rate for the testing data
	return missRate(ary, TrainDS['target'])
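missRate is not shown in this excerpt; a plausible stand-in, assuming one-hot targets (here squashed to 0.1/0.9) and an argmax decision, would be:

import numpy as np

def missRate(outputs, targets):
    # fraction of rows whose predicted class differs from the target class
    predicted = np.argmax(outputs, axis=1)
    expected = np.argmax(targets, axis=1)
    return float(np.sum(predicted != expected)) / len(targets)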
Example #6
def load3OrderDataSet():
    ds = SupervisedDataSet.loadFromFile(root.path() + '/res/dataset3')
    return ds
Example #7
    myfile.write(str(i)+'\n')
myfile.close()

#activate the neural networks
act = SupervisedDataSet(1,1)
act.addSample((0.2,),(0.880422606518061,))
n.activateOnDataset(act)
#create the test DataSet
x = numpy.arange(0.0, 1.0+0.01, 0.01)
s = 0.5+0.4*numpy.sin(2*numpy.pi*x)
tsts = SupervisedDataSet(1,1)
tsts.setField('input',x.reshape(len(x),1))
tsts.setField('target',s.reshape(len(s),1))

#read the train DataSet from file
trndata = SupervisedDataSet.loadFromFile(os.path.join(os.getcwd(),'trndata'))

#create the trainer

t = BackpropTrainer(n, learningrate=0.01, momentum=mom)
#train the neural network from the train DataSet

cterrori = 1.0
print "trainer momentum:" + str(mom)
for iter in range(25):
  t.trainOnDataset(trndata, 1000)
  ctrndata = mv.calculateModuleOutput(n, trndata)
  cterr = v.MSE(ctrndata, trndata['target'])
  relerr = abs(cterr - cterrori)
  cterrori = cterr
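  # (sketch, not from the original: relerr is typically used as a stopping
  # condition; the tolerance below is an arbitrary assumption)
  if relerr < 1e-6:
    break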
Example #8
from pybrain.datasets import SupervisedDataSet
print "Reading data set.."
DS = SupervisedDataSet.loadFromFile('dataset.csv')

#Split validation set
DStest, DStrain = DS.splitWithProportion(0.25)

#train nn
from sf.helpers import NeuralNet3L

print "Training network with {0} examples".format(len(DStrain))
net = NeuralNet3L(len(DStrain['input'][0]), 200, 1)
net.train(DStrain, lambda_reg=5, maxiter=40)

pvec = net.activate(DStest['input'])
err = 0
m = len(pvec)
print "Testing with {0} examples.".format(len(DStest))
for i in range(m):
    p = round(pvec[i])
    t = DStest['target'][i]
    if p != t: err += 1

print "Error on test set is:{0}%".format(err * 100 / m)
Example #9
def load_dataset():
    open_filename = tkFileDialog.askopenfilename()
    global ds
    ds = SupervisedDataSet.loadFromFile(open_filename)
Example #10
from pybrain.structure import RecurrentNetwork, FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer
from pybrain.structure import FullConnection
from pybrain.datasets import SupervisedDataSet, ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer, BiasUnit
from pylab import ion, ioff, figure, draw, contourf, clf, show, hold, plot
from scipy import diag, arange, meshgrid, where
from numpy.random import multivariate_normal
from numpy import array_equal
import pickle


DSSuperNorm = SupervisedDataSet.loadFromFile("Data/DSSuperNorm")

fileObject = open('NN.pybrain.net', 'rb')  # pickle files must be opened in binary mode

net = pickle.load(fileObject)

TrainDS, TestDS = DSSuperNorm.splitWithProportion(0.99)

for inpt, target in TestDS:
	sum = 0
	guess = net.activate(inpt)
	print("Hiphop\t  Jazz\t\tClassical\t\tCountry\t\tDance\t\tMetal\t\tReggae\t\tRock")
	for x in guess:
		sum += x
		print("{0:.6f}".format(x), end=' ')
	print("-> {}".format(target))
Example #11
from pybrain.datasets import SupervisedDataSet
print "Reading data set.."
DS = SupervisedDataSet.loadFromFile('dataset.csv')

#Split validation set
DStest, DStrain = DS.splitWithProportion(0.25)

#train nn
from sf.helpers import NeuralNet3L

print "Training network with {0} examples".format(len(DStrain))
net = NeuralNet3L(len(DStrain['input'][0]), 200, 1)
net.train(DStrain, lambda_reg=5, maxiter=40)


pvec = net.activate(DStest['input'])
err = 0
m = len(pvec)
print "Testing with {0} examples.".format(len(DStest))
for i in range(m):
    p = round(pvec[i])
    t = DStest['target'][i]
    if p != t: err += 1

print "Error on test set is:{0}%".format(err * 100 / m)
Example #12
from pybrain.structure import RecurrentNetwork, FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer
from pybrain.structure import FullConnection
from pybrain.datasets import SupervisedDataSet, ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer, BiasUnit
from pylab import ion, ioff, figure, draw, contourf, clf, show, hold, plot
from scipy import diag, arange, meshgrid, where
from numpy.random import multivariate_normal
from numpy import array_equal
import pickle

DSSuperRaw = SupervisedDataSet.loadFromFile("Data/DSSuperRaw")
DSClassRaw = ClassificationDataSet.loadFromFile("Data/DSClassRaw")

DSSuperWhiten = SupervisedDataSet.loadFromFile("Data/DSSuperWhiten")
DSClassWhiten = ClassificationDataSet.loadFromFile("Data/DSClassWhiten")

DSSuperNorm = SupervisedDataSet.loadFromFile("Data/DSSuperNorm")
DSClassNorm = ClassificationDataSet.loadFromFile("Data/DSClassNorm")

layers = (14, 14, 8)

net = buildNetwork(*layers, hiddenclass=TanhLayer, bias=True, outputbias=True, outclass=SoftmaxLayer, recurrent=True)

TrainDS, TestDS = DSSuperNorm.splitWithProportion(0.7)

# TrainDS._convertToOneOfMany()
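The excerpt ends before any training happens; with the modules already imported above, the step that would typically follow looks like this (the epoch count is an assumption):

trainer = BackpropTrainer(net, TrainDS)
for epoch in range(10):
    # train() runs one epoch over TrainDS and returns the training error
    print(trainer.train())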
Example #13
def load_dataset():
    open_filename = tkFileDialog.askopenfilename()
    global ds
    ds = SupervisedDataSet.loadFromFile(open_filename)
    print ds
Example #14
def loadDataSets(self, filename):
	self.testDs = SupervisedDataSet.loadFromFile('test' + filename)
	self.trainDs = SupervisedDataSet.loadFromFile('train' + filename)
Example #15
    return ds

ceas = Caesar()

if os.path.isfile('C:\\Users\\maxence\\Documents\\net.xml'):
    print 'Loading Net from file'
    net = NetworkReader.readFrom('C:\\Users\\maxence\\Documents\\net.xml')
else:
    print 'Building Network'
    net = buildNetwork(50, 150, 50, bias=True, hiddenclass=TanhLayer)
# 50 char max
# Normalized between -1 and 1 on ASCII 255, 0 for empty char, -1,993=1

if os.path.isfile('C:\\Users\\maxence\\Documents\\ds.xml'):
    print 'Loading Dataset from file'
    ds = SupervisedDataSet.loadFromFile('C:\\Users\\maxence\\Documents\\ds.xml')
else:
    print 'Building Dataset'
    ds = constructDataset()

tstdata, trndata = ds.splitWithProportion(0.1)
trainer = BackpropTrainer(net, trndata)

#print 'Untrained:'
#print [0,1], net.activate([0,1])
#print [0,0], net.activate([0,0])
#print [1,1], net.activate([1,1])
print 'Training'
trnerr, valerr = trainer.trainUntilConvergence(dataset=trndata, maxEpochs=50, verbose=True)
pl.plot(trnerr, 'b', valerr, 'r')
pl.show()
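On later runs the script expects to reload the trained network from net.xml, but the excerpt never writes that file; PyBrain's NetworkWriter is the counterpart of the NetworkReader call above. A minimal sketch:

from pybrain.tools.customxml.networkwriter import NetworkWriter

# persist the trained network so the isfile() branch above finds it next time
NetworkWriter.writeToFile(net, 'C:\\Users\\maxence\\Documents\\net.xml')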
Example #16
def read_data(self, fName="./data/mydata"):
    self.ds = SupervisedDataSet.loadFromFile(fName)
Example #17
	#	print "Returned Data", net.activate(testdata)

def ActivateNet(data):
	return net.activate(data)

#main program execution	

partition_size = int(raw_input("Partition size: "))

#dataset
dataset = SupervisedDataSet(partition_size*partition_size, 2)

load = raw_input("Do you want to load the dataset from file?: ")

if (load == 'y'):
	dataset = SupervisedDataSet.loadFromFile("dataset")

else:
	for filename in os.listdir("Images(Training)/A"):
		print filename
		image_file='Images(Training)/A/'+ filename
		colordata = ProcessImage(image_file, partition_size)
		#webbrowser.open("pixels.png")
		#raw_input()
		dataset.addSample(colordata, (1, 0))
		
	for filename in os.listdir("Images(Training)/B"):
		print filename
		image_file='Images(Training)/B/'+ filename
		colordata = ProcessImage(image_file, partition_size)
		#webbrowser.open("pixels.png")
Example #18
def load_dataset(self, open_filename):
    self.ds = SupervisedDataSet.loadFromFile(open_filename)
Example #19
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.supervised.trainers import BackpropTrainer
from os.path import isfile
from util import feature_to_names, push_to_int, int_to_side
from constants import *

assert isfile(NETWORK_FILE_NAME)
assert isfile(TEST_FILE_NAME)

test_ds = SupervisedDataSet.loadFromFile(TEST_FILE_NAME)
print "Test dataset loaded"

net = NetworkReader.readFrom(NETWORK_FILE_NAME)
print "Network loaded"

trainer = BackpropTrainer(net)
trainer.testOnData(test_ds, verbose=True)

error = 0

for datum in test_ds:
    x, y = datum[0], datum[1][0]
    predict = push_to_int(net.activate(x))
    error += predict != y
    # print "Heroes: {0}, Result: {1}, Predict: {2}".format(", ".join(feature_to_names(x)), int_to_side(y), int_to_side(predict))

print "{0} errors out of {1} data".format(error, len(test_ds))
print "Error rate: {0}".format(float(error) / len(test_ds))
Example #20
def read_data(self, fName="./data/mydata"):
    self.ds = SupervisedDataSet.loadFromFile(fName)
Example #21
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.supervised.trainers import BackpropTrainer
from os.path import isfile
from util import feature_to_names, push_to_int, int_to_side
from constants import *

assert isfile(NETWORK_FILE_NAME)
assert isfile(TEST_FILE_NAME)

test_ds = SupervisedDataSet.loadFromFile(TEST_FILE_NAME)
print "Test dataset loaded"

net = NetworkReader.readFrom(NETWORK_FILE_NAME)
print "Network loaded"

trainer = BackpropTrainer(net)
trainer.testOnData(test_ds, verbose=True)

error = 0

for datum in test_ds:
    x, y = datum[0], datum[1][0]
    predict = push_to_int(net.activate(x))
    error += predict != y
    # print "Heroes: {0}, Result: {1}, Predict: {2}".format(", ".join(feature_to_names(x)), int_to_side(y), int_to_side(predict))

print "{0} errors out of {1} data".format(error, len(test_ds))
print "Error rate: {0}".format(float(error) / len(test_ds))
Example #22
def load3OrderDataSet():
    ds = SupervisedDataSet.loadFromFile(root.path() + '/res/dataset3')
    return ds
Example #23
def load_dataset(self, open_filename):
    self.ds = SupervisedDataSet.loadFromFile(open_filename)
Example #24
def getDatasetFromFile(path="/res/dataSet"):
    return SupervisedDataSet.loadFromFile(path)
Example #25
from pybrain.structure import TanhLayer
import Lobsang
Lobsang.begin()
#Lobsang.wheels.calibrate_speeds(-0.8)
Lobsang.head.aim(1430, 1430)

print "Setting up..."
'''ds = SupervisedDataSet(1, 2)
ds.addSample((2,), (-6, -6))
ds.addSample((4,), (-4, -4))
ds.addSample((6,), (0, 0))
ds.addSample((8,), (4, 4))
ds.addSample((10,), (6, 6))'''

# loadFromFile is a classmethod that returns a new dataset, so the result has
# to be assigned; calling it on a fresh instance would leave that instance empty
ds = SupervisedDataSet.loadFromFile("nndist.ds")

ds.addSample((2,), (-6,))
ds.addSample((4,), (-4,))
ds.addSample((6,),  (2,))
ds.addSample((8,),  (4,))
ds.addSample((10,), (6,))

net = buildNetwork(1, 5, 1, bias=True, hiddenclass=TanhLayer)
trainer = BackpropTrainer(net, ds)

loop_count = 0
train_count = 0
try:
	print "Training 1000 times..."
	while train_count < 1000: