Example #1
    def buildNN(self, net, functions, inp, out):
        layers = []

        inLayer = self.func[functions[0]](inp)
        layers.append(inLayer)
        outLayer = self.func[functions[-1]](out)

        # each hidden neuron becomes its own 1-unit layer with its own activation
        for neural in range(1, len(net) - 1):
            layers.append(self.func[functions[neural]](1))
        layers.append(outLayer)

        connections, recConnections = self.fillConnections(net, [], [0], layers)
        if len(recConnections) == 0:
            n = FeedForwardNetwork()
        else:
            n = RecurrentNetwork()
        n.addInputModule(inLayer)
        for layer in range(1, len(layers) - 1):
            n.addModule(layers[layer])
        n.addOutputModule(outLayer)

        for con in connections:
            n.addConnection(con)
        for rcon in recConnections:
            n.addRecurrentConnection(rcon)
        n.sortModules()
        return n
Example #2
    def __init__(self, nin, nout):
        singleton.append(self)

        self.inn = nin
        self.outn = nout

        # build the recurrent network by hand (the buildNetwork shortcut is not used)
        self.n = RecurrentNetwork()
        self.n.addInputModule(LinearLayer(nin, name='in'))
        self.n.addOutputModule(LinearLayer(nout, name='out'))

        self.n.addModule(SigmoidLayer(8, name='hidden2'))
        self.n.addModule(TanhLayer((nin + nout) // 2, name='hidden1'))
        self.n.addModule(BiasUnit(name='bias'))
        self.n.addModule(LSTMLayer(5, name='memory'))

        self.n.addConnection(FullConnection(self.n['in'], self.n['hidden1']))
        self.n.addConnection(FullConnection(self.n['bias'], self.n['hidden1']))
        self.n.addConnection(FullConnection(self.n['hidden1'], self.n['hidden2']))
        self.n.addConnection(FullConnection(self.n['hidden2'], self.n['out']))
        self.n.addConnection(FullConnection(self.n['hidden1'], self.n['memory']))
        self.n.addConnection(FullConnection(self.n['memory'], self.n['hidden2']))
        self.n.addConnection(FullConnection(self.n['in'], self.n['hidden2']))
        
        self.n.addRecurrentConnection(FullConnection(self.n['hidden1'], self.n['hidden1']))
        self.n.addRecurrentConnection(FullConnection(self.n['memory'], self.n['hidden1']))
        self.n.sortModules()
Example #3
 def __init__(self, dire, x, y, genome):
     self.direction = dire
     self.x = x
     self.y = y
     self.genome = genome
     self.hunger = 150
     self.boredom = 150
     self.pain = 0
     self.last_hunger = 150
     self.last_boredom = 150
     self.last_pain = 0
     self.last_obj = None
     #self.net = FeedForwardNetwork()
     self.net = RecurrentNetwork()
     self.net.sequential = False
Example #4
    def __init__(self):
        self.n = RecurrentNetwork()
        inLayer = LinearLayer(8)
        hiddenLayer = SigmoidLayer(4)
        self.numInputs = 8
        outLayer = LinearLayer(4)
        self.n.addInputModule(inLayer)
        self.n.addModule(hiddenLayer)
        self.n.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        self.n.addConnection(in_to_hidden)
        self.n.addConnection(hidden_to_out)

        self.n.sortModules()
        self.ds = SupervisedDataSet(8, 4) 
        self.trainer = BackpropTrainer(self.n, self.ds)
Example #5
	def initialize(self, verbose):
		print("Initializing language learner...")
		self.verbose = verbose

		# Create network and modules
		self.net = RecurrentNetwork()
		inp = LinearLayer(self.inputs, name="in")
		hiddenModules = []
		for i in range(0, self.hiddenLayers):
			hiddenModules.append(LSTMLayer(self.hiddenNodes, name=("hidden-" + str(i + 1))))
		outp = LinearLayer(self.outputs, name="out")

		# Add modules to the network with recurrence
		self.net.addOutputModule(outp)
		self.net.addInputModule(inp)
		
		for module in hiddenModules:
			self.net.addModule(module)

		# Create connections

		self.net.addConnection(FullConnection(self.net["in"], self.net["hidden-1"]))
		for i in range(0, len(hiddenModules) - 1):
			self.net.addConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 2)]))
			self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 1)]))
		self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))],
			self.net["hidden-" + str(len(hiddenModules))]))
		self.net.addConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))], self.net["out"]))
		self.net.sortModules()

		self.trainingSet = SequentialDataSet(self.inputs, self.outputs)
		for x, y in zip(self.dataIn, self.dataOut):
			self.trainingSet.newSequence()
			self.trainingSet.appendLinked([x], [y])

		self.net.randomize()

		print("Neural network initialzed with structure:")
		print(self.net)

		self.trainer = BackpropTrainer(self.net, self.trainingSet, verbose=verbose)
		self.__initialized = True
		print("Successfully initialized network.")
Example #6
class BrainController:

    indim = 2
    outdim = 2

    def __init__(self, trained_net=None):
        if trained_net is None:
            self.net = RecurrentNetwork()
            self.init_network(self.net)
        else:
            self.net = trained_net

    def init_network(self, net):
        net.addInputModule(LinearLayer(2, 'in'))
        net.addModule(SigmoidLayer(3, 'hidden'))
        net.addOutputModule(LinearLayer(2, 'out'))
        net.addModule(BiasUnit(name='bias'))
        net.addConnection(FullConnection(net['in'], net['hidden']))
        net.addConnection(FullConnection(net['hidden'], net['out']))
        net.sortModules()

    def train(self, data):
        ds = SupervisedDataSet(2, 2)
        for i in range(0, len(data)):
            input, target = data[i]
            ds.addSample(input, target)

        trainer = BackpropTrainer(self.net,
                                  ds,
                                  learningrate=0.01,
                                  momentum=0.99,
                                  verbose=True)

        max_error = 1e-5
        error = 1
        while abs(error) >= max_error:
            error = trainer.train()

        #self.validate_net()
        f = open('neuro.net', 'wb')
        pickle.dump(self.net, f)
        f.close()

    def validate_net(self):
        print self.net.activate([0, 0])
        print self.net.activate([0, 1])
        print self.net.activate([0, 2])
        print self.net.activate([1, 0])
        print self.net.activate([1, 1])
        print self.net.activate([1, 2])
Example #7
    def __init__(self, name, dataset, trained, store):
        self.name = name
        self.store = store
        self.trained = trained
        self.dataset = dataset

        self.net = RecurrentNetwork()
        self.net.addInputModule(LinearLayer(2, name='in'))
        self.net.addModule(SigmoidLayer(3, name='hidden'))
        self.net.addOutputModule(LinearLayer(2, name='out'))
        self.net.addConnection(FullConnection(self.net['in'], self.net['hidden'], name='c1'))
        self.net.addConnection(FullConnection(self.net['hidden'], self.net['out'], name='c2'))
        self.net.addRecurrentConnection(FullConnection(self.net['hidden'], self.net['hidden'], name='c3'))
        self.net.sortModules()
        '''
        self.net = buildNetwork(2, 3, 2)
        '''
        if not self.trained:
            self.train()

        return
Example #8
class BrainController:

    indim = 2
    outdim = 2

    def __init__(self, trained_net = None):
        if trained_net is None:
            self.net = RecurrentNetwork()
            self.init_network(self.net)
        else:
            self.net = trained_net

    def init_network(self, net):
        net.addInputModule(LinearLayer(2, 'in'))
        net.addModule(SigmoidLayer(3, 'hidden'))
        net.addOutputModule(LinearLayer(2, 'out'))
        net.addModule(BiasUnit(name='bias'))
        net.addConnection(FullConnection(net['in'], net['hidden']))
        net.addConnection(FullConnection(net['hidden'], net['out']))
        net.sortModules()

    def train(self, data):
        ds = SupervisedDataSet(2, 2)
        for i in range(0, len(data)):
            input, target = data[i]
            ds.addSample(input, target)

        trainer = BackpropTrainer(self.net, ds, learningrate=0.01, momentum=0.99,
                verbose=True)

        max_error = 1e-5
        error = 1
        while abs(error) >= max_error:
            error = trainer.train()

        #self.validate_net()
        f = open('neuro.net', 'wb')
        pickle.dump(self.net, f)
        f.close()

    def validate_net(self):
        print self.net.activate([0, 0])
        print self.net.activate([0, 1])
        print self.net.activate([0, 2])
        print self.net.activate([1, 0])
        print self.net.activate([1, 1])
        print self.net.activate([1, 2])
Example #9
def testTraining():
    # the AnBnCn dataset (sequential)
    d = AnBnCnDataSet()
    
    # build a recurrent network to be trained
    hsize = 2
    n = RecurrentNetwork()
    n.addModule(TanhLayer(hsize, name = 'h'))
    n.addModule(BiasUnit(name = 'bias'))
    n.addOutputModule(LinearLayer(1, name = 'out'))
    n.addConnection(FullConnection(n['bias'], n['h']))
    n.addConnection(FullConnection(n['h'], n['out']))
    n.addRecurrentConnection(FullConnection(n['h'], n['h']))
    n.sortModules()

    # initialize the backprop trainer and train
    t = BackpropTrainer(n, learningrate = 0.1, momentum = 0.0, verbose = True)
    t.trainOnDataset(d, 200)
    
    # the resulting weights are in the network:
    print 'Final weights:', n.params
Example #10
# which are raw price movements)
dataset, tgt = dtools.gen_ds(ltc, 1, ltc_opts, "CRT")

# initialize a pybrain dataset
DS = SupervisedDataSet(len(dataset.values[0]), np.size(tgt.values[0]))

# fill it
for i in xrange(len(dataset)):
    DS.appendLinked(dataset.values[i], [tgt.values[i]])

# split 70% for training, 30% for testing
train_set, test_set = DS.splitWithProportion(0.7)

# build our recurrent network with 10 hidden neurodes, one recurrent
# connection, using tanh activation functions
net = RecurrentNetwork()
hidden_neurodes = 10
net.addInputModule(LinearLayer(len(train_set["input"][0]), name="in"))
net.addModule(TanhLayer(hidden_neurodes, name="hidden1"))
net.addOutputModule(LinearLayer(len(train_set["target"][0]), name="out"))
net.addConnection(FullConnection(net["in"], net["hidden1"], name="c1"))
net.addConnection(FullConnection(net["hidden1"], net["out"], name="c2"))
net.addRecurrentConnection(FullConnection(net["out"], net["hidden1"], name="cout"))
net.sortModules()
net.randomize()

# train for 30 epochs (overkill) using the rprop- training algorithm
trainer = RPropMinusTrainer(net, dataset=train_set, verbose=True)
trainer.trainOnDataset(train_set, 30)

# test on training set
Example #11
from pybrain.structure import RecurrentNetwork, FullConnection
from pybrain.structure import LinearLayer
# from pybrain.structure import SigmoidLayer
from pybrain.structure import TanhLayer
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer


# Define network structure

network = RecurrentNetwork(name="XOR")

inputLayer = LinearLayer(2, name="Input")
hiddenLayer = TanhLayer(3, name="Hidden")
outputLayer = LinearLayer(1, name="Output")

network.addInputModule(inputLayer)
network.addModule(hiddenLayer)
network.addOutputModule(outputLayer)

c1 = FullConnection(inputLayer, hiddenLayer, name="Input_to_Hidden")
c2 = FullConnection(hiddenLayer, outputLayer, name="Hidden_to_Output")
c3 = FullConnection(hiddenLayer, hiddenLayer, name="Recurrent_Connection")

network.addConnection(c1)
network.addRecurrentConnection(c3)
network.addConnection(c2)

network.sortModules()

# Add a data set
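# A minimal sketch of the elided step, using the standard XOR truth table:
dataset = SupervisedDataSet(2, 1)
dataset.addSample([0, 0], [0])
dataset.addSample([0, 1], [1])
dataset.addSample([1, 0], [1])
dataset.addSample([1, 1], [0])
trainer = BackpropTrainer(network, dataset)
for _ in range(100):
    trainer.train()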
Example #12
def getNetwork(trndata):
	n = RecurrentNetwork()
	n.addInputModule(LinearLayer(trndata.indim, name='in'))
	n.addModule(SigmoidLayer(100, name='hidden'))
	n.addOutputModule(LinearLayer(trndata.outdim, name='out'))
	n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
	n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
	n.addRecurrentConnection(FullConnection(n['hidden'], n['hidden'], name='c3'))
	n.sortModules()


	# fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
	trainer = BackpropTrainer( n, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)

	# TODO: return network and trainer here. Make another function for training
	# for i in range(20):
		# trainer.trainEpochs(1)
	# trainer.trainUntilConvergence(maxEpochs=100)

	# trnresult = percentError( trainer.testOnClassData(),trndata['class'] )
	# tstresult = percentError( trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )

	# print "epoch: %4d" % trainer.totalepochs, \
	# 	"  train error: %5.2f%%" % trnresult

	# out = fnn.activateOnDataset(tstdata)
	# out = out.argmax(axis=1)  # the highest output activation gives the class
	return (n, trainer)
Example #13
def trainedRFCNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))

    n.sortModules()

    draw_connections(n)
    # d = generateTraininqgData()
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count = count + 1
        if (count == 100):
            return trainedRFCNN()

    # for i in range(100):
    #     print t.train()


    exportRFCNN(n)
    draw_connections(n)

    return n
Example #14
def trainedRFCNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))

    n.sortModules()

    draw_connections(n)
    # d = generateTraininqgData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count = count + 1
        if (count == 100):
            return trainedRFCNN()

    # for i in range(100):
    #     print t.train()

    exportRFCNN(n)
    draw_connections(n)

    return n
Example #15
def buildMixedNestedNetwork():
    """ build a nested network with the inner one being a ffn and the outer one being recurrent. """
    N = RecurrentNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addRecurrentConnection(FullConnection(c, c))
    N.sortModules()
    return N
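
# A minimal usage sketch (an assumption, not in the original): the outer net
# takes a 1-dim input (module 'a') and emits 2 values (module 'b').
nested = buildMixedNestedNetwork()
print(nested.activate([0.3]))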
Example #16
from pybrain.structure import RecurrentNetwork, LinearLayer, LSTMLayer, SigmoidLayer
from pybrain.structure import FullConnection
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.xml.networkwriter import NetworkWriter
from pybrain.utilities import percentError
import data_parsing
from scipy import array,where

training_data = data_parsing.conversion_to_one_hot_representation()
ds = data_parsing.conversion_to_pybrain_dataset_format(training_data)
test, train = ds.splitWithProportion(0.25)  # 25% test, 75% train


n = RecurrentNetwork()

input1 = LinearLayer(28)
hidden1 = LSTMLayer(512)
hidden2 = LSTMLayer(512)
hidden3 = LSTMLayer(128)
output1 = SigmoidLayer(39)
output2 = LinearLayer(39)

n.addInputModule(input1)
n.addModule(hidden1)
n.addModule(hidden2)
n.addModule(hidden3)
n.addModule(output1)
n.addOutputModule(output2)
Example #17
def buildSimpleLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(100, name='i')
    h = LSTMLayer(10, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))
    N.sortModules()
    return N
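
# A minimal usage sketch (an assumption): repeated activations accumulate
# recurrent LSTM state; reset() clears it between sequences.
lstm_net = buildSimpleLSTMNetwork()
for _ in range(3):
    out = lstm_net.activate([0.5] * 100)
lstm_net.reset()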
Example #18
# which are raw price movements)
dataset, tgt = dtools.gen_ds(ltc, 1, ltc_opts, "CRT")

# initialize a pybrain dataset
DS = SupervisedDataSet(len(dataset.values[0]), np.size(tgt.values[0]))

# fill it
for i in xrange(len(dataset)):
    DS.appendLinked(dataset.values[i], [tgt.values[i]])

# split 70% for training, 30% for testing
train_set, test_set = DS.splitWithProportion(.7)

# build our recurrent network with 10 hidden neurodes, one recurrent
# connection, using tanh activation functions
net = RecurrentNetwork()
hidden_neurodes = 10
net.addInputModule(LinearLayer(len(train_set["input"][0]), name="in"))
net.addModule(TanhLayer(hidden_neurodes, name="hidden1"))
net.addOutputModule(LinearLayer(len(train_set["target"][0]), name="out"))
net.addConnection(FullConnection(net["in"], net["hidden1"], name="c1"))
net.addConnection(FullConnection(net["hidden1"], net["out"], name="c2"))
net.addRecurrentConnection(
    FullConnection(net["out"], net["hidden1"], name="cout"))
net.sortModules()
net.randomize()

# train for 30 epochs (overkill) using the rprop- training algorithm
trainer = RPropMinusTrainer(net, dataset=train_set, verbose=True)
trainer.trainOnDataset(train_set, 30)
Example #19
    def createJeffersonMDLNetwork(mdl_length=2,
                                  hidden_count=5,
                                  output_count=4,
                                  in_to_out_connect=True,
                                  name=None):

        ret_net = RecurrentNetwork(name=name)

        # Add some components of the neural network.
        hidden_layer = SigmoidLayer(hidden_count, name="hidden")
        output_layer = LinearLayer(output_count, name="move")

        ret_net.addModule(hidden_layer)
        ret_net.addOutputModule(output_layer)

        ret_net.addConnection(
            FullConnection(hidden_layer,
                           output_layer,
                           name="Hidden to Move Layer"))

        mdl_prev = ()

        for idx in range(0, mdl_length):
            # Create the layers
            food_layer = LinearLayer(2, name="Food {0}".format(idx))
            mdl_layer = LinearLayer(2, name="MDL Layer {0}".format(idx))

            # Add to network
            ret_net.addModule(food_layer)
            if idx == 0:
                ret_net.addInputModule(mdl_layer)
            else:
                ret_net.addModule(mdl_layer)
                # Add delay line connection.
                ret_net.addRecurrentConnection(
                    FullConnection(mdl_prev,
                                   mdl_layer,
                                   name="Recurrent DL {0} to DL {1}".format(
                                       idx - 1, idx)))

            # Add connections for
            # - Delay line to NN.
            # - NN to Hidden.
            # - NN to Out (if desired).
            ret_net.addConnection(
                FullConnection(mdl_layer,
                               food_layer,
                               name="DL {0} to Food {0}".format(idx)))
            ret_net.addConnection(
                FullConnection(food_layer,
                               hidden_layer,
                               name="Food {0} to Hidden".format(idx)))
            if in_to_out_connect:
                ret_net.addConnection(
                    FullConnection(food_layer,
                                   output_layer,
                                   name="Food {0} to Output".format(idx)))

            mdl_prev = mdl_layer

        ret_net.sortModules()

        return ret_net
Example #20
    def createJeffersonStyleNetwork(in_count=2,
                                    hidden_count=5,
                                    output_count=4,
                                    recurrent=True,
                                    in_to_out_connect=True,
                                    name=None):
        """
        Creates a Jefferson-esque neural network for trail problem.


        Returns:
            pybrain.network. The neural network.

        """

        if recurrent:
            ret_net = RecurrentNetwork(name=name)
        else:
            ret_net = FeedForwardNetwork(name=name)

        in_layer = LinearLayer(in_count, name="food")
        hidden_layer = SigmoidLayer(hidden_count, name="hidden")
        output_layer = LinearLayer(output_count, name="move")

        ret_net.addInputModule(in_layer)
        ret_net.addModule(hidden_layer)
        ret_net.addOutputModule(output_layer)

        in_to_hidden = FullConnection(in_layer, hidden_layer)
        hidden_to_out = FullConnection(hidden_layer, output_layer)

        ret_net.addConnection(in_to_hidden)
        ret_net.addConnection(hidden_to_out)

        if in_to_out_connect:
            in_to_out = FullConnection(in_layer, output_layer)
            ret_net.addConnection(in_to_out)

        if recurrent:
            hidden_to_hidden = FullConnection(hidden_layer, hidden_layer)
            ret_net.addRecurrentConnection(hidden_to_hidden)

        ret_net.sortModules()

        return ret_net
Example #21
import itertools
from pybrain.structure import RecurrentNetwork, LinearLayer, SigmoidLayer, FullConnection
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import SupervisedDataSet

john = 1
bill = 2
sue = 3
mary = 4
love = 10
see = 11

ds = SupervisedDataSet(3, 1)  # three inputs: verb, agent, patient

for verb in [love, see]:
    for a, b in itertools.combinations([john, bill, sue, mary], 2):
        ds.addSample((verb, a, b), (1, ))

n = RecurrentNetwork()
n.addInputModule(LinearLayer(3, name='in'))
n.addModule(SigmoidLayer(3, name='hidden'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
n.addRecurrentConnection(FullConnection(n['hidden'], n['hidden'], name='c3'))
n.sortModules()  # required before the network can be trained or activated

trainer = BackpropTrainer(n, ds)
trainer.train()
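
# A small follow-up sketch (an assumption): query the trained network with one
# of the encoded triples, e.g. "john loves mary".
print(n.activate((love, john, mary)))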
Example #22
 def __init__(self, trained_net=None):
     if trained_net is None:
         self.net = RecurrentNetwork()
         self.init_network(self.net)
     else:
         self.net = trained_net
Example #23
def main():
    numIterations=200
    terminal_EMA_SharpeRatio=[0 for i in range(numIterations)]
    numTrades=[0 for i in range(numIterations)]
    sharpe_first_half=[0 for i in range(numIterations)]
    sharpe_sec_half=[0 for i in range(numIterations)]
    sharpe_ratio_total=[0 for i in range(numIterations)]

    for i in range(numIterations):
        env=RWEnvironment(2000)
        task = MaximizeReturnTask(env)
        numIn=min(env.worldState.shape)

        net = RecurrentNetwork()
        net.addInputModule(BiasUnit(name='bias'))
        net.addInputModule(LinearLayer(numIn, name='in'))
        net.addOutputModule(SignLayer(1, name='out'))
        net.addConnection(FullConnection(net['in'], net['out'], name='c1'))
        net.addConnection(FullConnection(net['bias'], net['out'], name='c2'))
        net.addRecurrentConnection(FullConnection(net['out'], net['out'], name='c3'))
        net.sortModules()

        ts=env.ts
        learner = RRL(numIn+2,ts) # ENAC() #Q_LinFA(2,1)
        agent = LearningAgent(net,learner)
        exp = ContinuousExperiment(task,agent)

        #performance tracking

        exp.doInteractionsAndLearn(len(ts)-1)
            #print(net._params)
        terminal_EMA_SharpeRatio[i]=learner.ema_sharpeRatio[-1]
        rs=pE.calculateTradingReturn(env.actionHistory,ts)
        sharpe_first_half[i]=pE.annualisedSharpe(rs[:(len(ts)/2)])
        sharpe_sec_half[i]=pE.annualisedSharpe(rs[len(ts)/2:])
        sharpe_ratio_total[i]=pE.annualisedSharpe(rs)
        numTrades[i]=learner.numTrades



    print(net._params)
    print("average number of trades per 1000 observations is {}".format(mean(numTrades)/2))
    print("mean Sharpe ratios are {} with standard errors {}, and {} with standard errors {}".format(mean(sharpe_first_half),st.sem(sharpe_first_half),mean(sharpe_sec_half),st.sem(sharpe_sec_half)))
    print("average sharpe ratio for each entire epoche is {} with standard error {}".format(mean(sharpe_ratio_total),st.sem(sharpe_ratio_total)))
    fig,ax= plt.subplots(nrows=2,ncols=1,sharex=True,sharey=True)
    l1=ax[0].hist(sharpe_first_half,bins=20)
    ax[0].set_title('Annualised Sharpe Ratio (t=0:1000)')
    l2=ax[1].hist(sharpe_sec_half,bins=20)
    ax[1].set_title('Annualised Sharpe Ratio (t=1001:2000)')
    plt.show()


    #plt.hist(numTrades,bins=20)


    #plt.plot(terminal_EMA_SharpeRatio)
    #plt.show()

    actionHist=env.actionHistory
    ts=[t/100 for t in ts]
    cum_log_r=cumsum([log(1+ts[i]) for i in range(len(ts))])
    cum_log_R=cumsum([log(1+(actionHist[i]*ts[i])) for i in range(len(ts))])



    fix, axes = plt.subplots(3, sharex=True)
    ln1=axes[0].plot(cum_log_r,label='Buy and Hold')
    ln2=axes[0].plot(cum_log_R,label='Trading Agent')
    lns=ln1+ln2
    labs=[l.get_label() for l in lns]
    axes[0].legend(lns,labs,loc='upper left')
    axes[0].set_ylabel("Cumulative Log Returns")
    ax[0].set_title("Artificial Series")
    ln3=axes[1].plot(actionHist,'r',label='Trades')
    axes[1].set_ylabel("F(t)")
    axes[2].plot(learner.ema_sharpeRatio)
    axes[2].set_ylabel("EMA Sharpe Ratio")
    plt.show()
Example #24
def buildMinimalMDLSTMNetwork():
    N = RecurrentNetwork('simpleMdLstmNet')
    i = LinearLayer(4, name='i')
    h = MDLSTMLayer(1, peepholes=True, name='mdlstm')
    o = LinearLayer(1, name='o')
    N.addInputModule(i)
    N.addModule(h)
    N.addOutputModule(o)
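    # The sliced identity connections below wire the MDLSTM cell manually: the
    # 4-dim input fills the four gate inputs (outSliceTo=4), the previous cell
    # state (the layer's second output, inSliceFrom=1) is fed back into input
    # slot 4 (outSliceFrom=4), and only the first output (inSliceTo=1) is read.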
    N.addConnection(IdentityConnection(i, h, outSliceTo=4))
    N.addRecurrentConnection(
        IdentityConnection(h, h, outSliceFrom=4, inSliceFrom=1))
    N.addConnection(IdentityConnection(h, o, inSliceTo=1))
    N.sortModules()
    return N
Example #25
def trainFunc(params):
    iter, trainds, validds, input_size, hidden, func, eta, lmda, epochs = params
    print('Iter:', iter, 'Epochs:', epochs, 'Hidden_size:', hidden, 'Eta:',
          eta, 'Lamda:', lmda, 'Activation:', func)

    # Build network
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(func(hidden, name='hidden'))
    n.addModule(LinearLayer(hidden, name='context'))
    n.addOutputModule(LinearLayer(1, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='in_to_hidden'))
    n.addConnection(FullConnection(n['hidden'], n['out'],
                                   name='hidden_to_out'))
    n.addRecurrentConnection(FullConnection(n['hidden'], n['context']))
    # close the Elman loop: without this the context layer is a dead end
    n.addConnection(FullConnection(n['context'], n['hidden']))
    n.sortModules()

    trainer = BackpropTrainer(n,
                              trainds,
                              learningrate=eta,
                              weightdecay=lmda,
                              momentum=0.1,
                              shuffle=False)
    trainer.trainEpochs(epochs)
    pred = np.nan_to_num(n.activateOnDataset(validds))
    validerr = eval.calc_RMSE(validds['target'], pred)
    varscore = explained_variance_score(validds['target'], pred)
    return validerr, varscore, n
Example #26
from pybrain.structure import RecurrentNetwork
from pybrain.structure import FullConnection, LinearLayer, LSTMLayer
from parsemusic import ds
import random
print ds

layerCount = 10

net = RecurrentNetwork()
net.addInputModule(LinearLayer(10, name='in'))
for x in range(layerCount):
    net.addModule(LSTMLayer(20, name='hidden' + str(x)))
net.addOutputModule(LinearLayer(10, name='out'))
net.addConnection(FullConnection(net['in'], net['hidden0'], name='cIn'))
for x in range(layerCount - 1):
    net.addConnection(
        FullConnection(net[('hidden' + str(x))],
                       net['hidden' + str(x + 1)],
                       name=('c' + str(x + 1))))
net.addConnection(
    FullConnection(net['hidden' + str(layerCount - 1)],
                   net['out'],
                   name='cOut'))
net.sortModules()
from pybrain.supervised import RPropMinusTrainer
trainer = RPropMinusTrainer(net, dataset=ds)

epochcount = 0
while True:
    startingnote = random.choice(range(1, 17))
    startingnote2 = random.choice(range(1, 17))
Example #27
def buildElmanNetwork(hiddenSize):
    net = RecurrentNetwork()
    inLayer = LinearLayer(sampleSize())
    hiddenLayer = SigmoidLayer(hiddenSize)
    outLayer = SigmoidLayer(outputSize())
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)
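    # the identity recurrence copies the hidden activation back one timestep
    # with fixed unit weights - the classic Elman context mechanism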
    hiddenRecursive = IdentityConnection(hiddenLayer, hiddenLayer)
    inToHidden = FullConnection(inLayer, hiddenLayer)
    hiddenToOut = FullConnection(hiddenLayer, outLayer)
    net.addRecurrentConnection(hiddenRecursive)
    net.addConnection(inToHidden)
    net.addConnection(hiddenToOut)
    net.sortModules()
    net.randomize()
    return net
Example #28
 def equivalence_recurrent(self, builder):
     _net = pybrainbridge._RecurrentNetwork()
     builder(_net)
     net = RecurrentNetwork()
     builder(net)
     super(TestNetworkEquivalence, self).equivalence_recurrent(net, _net)
Example #29
__author__ = 'davidoregan'

from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.structure import FullConnection
from pybrain.structure import RecurrentNetwork

# Specify networks
n = FeedForwardNetwork()
r = RecurrentNetwork()

inLayer = LinearLayer(2)
hiddenLayer = SigmoidLayer(3)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

n.sortModules()

r.addInputModule(LinearLayer(2, name='in'))
r.addModule(SigmoidLayer(3, name='hidden'))
r.addOutputModule(LinearLayer(1, name='out'))
Example #30
class RecurrentNeuralNetwork:
    """
    Recurrent neural network.
    """
    def __init__(self, nin, nout):
        singleton.append(self)

        self.inn = nin
        self.outn = nout

        # build the recurrent network by hand (the buildNetwork shortcut is not used)
        self.n = RecurrentNetwork()
        self.n.addInputModule(LinearLayer(nin, name='in'))
        self.n.addOutputModule(LinearLayer(nout, name='out'))

        self.n.addModule(SigmoidLayer(8, name='hidden2'))
        self.n.addModule(TanhLayer((nin + nout) // 2, name='hidden1'))
        self.n.addModule(BiasUnit(name='bias'))
        self.n.addModule(LSTMLayer(5, name='memory'))

        self.n.addConnection(FullConnection(self.n['in'], self.n['hidden1']))
        self.n.addConnection(FullConnection(self.n['bias'], self.n['hidden1']))
        self.n.addConnection(FullConnection(self.n['hidden1'], self.n['hidden2']))
        self.n.addConnection(FullConnection(self.n['hidden2'], self.n['out']))
        self.n.addConnection(FullConnection(self.n['hidden1'], self.n['memory']))
        self.n.addConnection(FullConnection(self.n['memory'], self.n['hidden2']))
        self.n.addConnection(FullConnection(self.n['in'], self.n['hidden2']))
        
        self.n.addRecurrentConnection(FullConnection(self.n['hidden1'], self.n['hidden1']))
        self.n.addRecurrentConnection(FullConnection(self.n['memory'], self.n['hidden1']))
        self.n.sortModules()

    def set_learning_data(self, dataset):
        """
        Set dataset used to train network.
        """
        self.ds_learn = dataset

    def train(self, epochs=100):
        """
        Train the network
        """
        #self.n.reset()
        trainer = BackpropTrainer(self.n, self.ds_learn, verbose=True)
        # trainer.setData(self.ds_learn)
        return trainer.trainEpochs(epochs=epochs)

    def validate_error(self, dataset):
        """
        Return error value for given dataset
        """
        v = Validator()
        #self.n.reset()
        return v.MSE(self.n, dataset)

    def calculate(self, dataset):
        """
        Return network response for given dataset
        """
        #self.n.reset()
        return self.n.activateOnDataset(dataset)
Example #31
#testdf = pd.read_pickle(testdf_path)

# Construct one hot character dictionary
conglomerateString = ''
for index, row in df.iterrows():
    conglomerateString += row.values

conglomerateSet = list(set(list(conglomerateString[0])))
codeTable = pd.Series(data=conglomerateSet, index=conglomerateSet)
codeTable = pd.get_dummies(codeTable)

conglomerateSet = []
conglomerateString = []

# Construct LSTM network
rnn = RecurrentNetwork()

inputSize = len(codeTable['a'].values)
outputSize = 4
hiddenSize = 10

rnn.addInputModule(LinearLayer(dim=inputSize, name='in'))
rnn.addModule(TanhLayer(dim=hiddenSize, name='in_proc'))
rnn.addModule(LSTMLayer(dim=hiddenSize, peepholes=True, name='hidden'))
rnn.addModule(TanhLayer(dim=hiddenSize, name='out_proc'))
rnn.addOutputModule(SoftmaxLayer(dim=outputSize, name='out'))

rnn.addConnection(FullConnection(rnn['in'], rnn['in_proc'], name='c1'))
rnn.addConnection(FullConnection(rnn['in_proc'], rnn['hidden'], name='c2'))
rnn.addRecurrentConnection(
    FullConnection(rnn['hidden'], rnn['hidden'], name='c3'))
Example #32
def buildMinimalLSTMNetwork():
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(4, name='i')
    h = LSTMLayer(1, peepholes=True, name='lstm')
    o = LinearLayer(1, name='o')
    N.addInputModule(i)
    N.addModule(h)
    N.addOutputModule(o)
    N.addConnection(IdentityConnection(i, h))
    N.addConnection(IdentityConnection(h, o))
    N.sortModules()
    return N
Example #33
def construct_network(input_len, output_len, hidden_nodes, is_elman=True):
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(input_len, name="i"))
    n.addModule(BiasUnit("b"))
    n.addModule(SigmoidLayer(hidden_nodes, name="h"))
    n.addOutputModule(LinearLayer(output_len, name="o"))

    n.addConnection(FullConnection(n["i"], n["h"]))
    n.addConnection(FullConnection(n["b"], n["h"]))
    n.addConnection(FullConnection(n["b"], n["o"]))
    n.addConnection(FullConnection(n["h"], n["o"]))

    if is_elman:
        # Elman (hidden->hidden)
        n.addRecurrentConnection(FullConnection(n["h"], n["h"]))
    else:
        # Jordan (out->hidden)
        n.addRecurrentConnection(FullConnection(n["o"], n["h"]))

    n.sortModules()
    n.reset()

    return n
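
# A quick usage sketch (hypothetical sizes, not part of the original):
elman = construct_network(3, 1, 5, is_elman=True)
jordan = construct_network(3, 1, 5, is_elman=False)
print(elman.activate([0.1, 0.2, 0.3]))  # recurrent state persists across calls
print(jordan.activate([0.1, 0.2, 0.3]))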
Example #34
def trainNetwork(dirname):

    numFeatures = 2000

    ds = SequentialDataSet(numFeatures, 1)

    tracks = glob.glob(os.path.join(dirname, "*.csv"))
    for t in tracks:
        track = os.path.splitext(t)[0]
        # load training data
        print "Reading %s..." % t
        data = numpy.genfromtxt(t, delimiter=",")
        numData = data.shape[0]

        # add the input to the dataset
        print "Adding to dataset..."
        ds.newSequence()
        for i in range(numData):
            # ds.addSample(data[i], (labels[i],))
            input = data[i]
            label = input[numFeatures]
            if label > 0:
                label = midi_util.frequencyToMidi(label)
            ds.addSample(input[0:numFeatures], (label,))

    # initialize the neural network
    print "Initializing neural network..."
    # net = buildNetwork(numFeatures, 50, 1,
    #                   hiddenclass=LSTMLayer, bias=True, recurrent=True)

    # manual network building
    net = RecurrentNetwork()
    inlayer = LinearLayer(numFeatures)
    # h1 = LSTMLayer(70)
    # h2 = SigmoidLayer(50)
    octaveLayer = LSTMLayer(5)
    noteLayer = LSTMLayer(12)
    combinedLayer = SigmoidLayer(60)
    outlayer = LinearLayer(1)

    net.addInputModule(inlayer)
    net.addOutputModule(outlayer)
    # net.addModule(h1)
    # net.addModule(h2)
    net.addModule(octaveLayer)
    net.addModule(noteLayer)
    net.addModule(combinedLayer)

    # net.addConnection(FullConnection(inlayer, h1))
    # net.addConnection(FullConnection(h1, h2))
    # net.addConnection(FullConnection(h2, outlayer))

    net.addConnection(FullConnection(inlayer, octaveLayer))
    net.addConnection(FullConnection(inlayer, noteLayer))
    # net.addConnection(FullConnection(octaveLayer,combinedLayer))
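    # (explanatory note) each sliced connection below maps octave neuron i onto
    # its own 12-unit block of the combined layer, one block per octave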
    for i in range(5):
        net.addConnection(
            FullConnection(
                octaveLayer, combinedLayer, inSliceFrom=i, inSliceTo=i + 1, outSliceFrom=i * 12, outSliceTo=(i + 1) * 12
            )
        )
    net.addConnection(FullConnection(noteLayer, combinedLayer))
    net.addConnection(FullConnection(combinedLayer, outlayer))

    net.sortModules()

    # train the network on the dataset
    print "Training neural net"
    trainer = RPropMinusTrainer(net, dataset=ds)
    ##    trainer.trainUntilConvergence(maxEpochs=50, verbose=True, validationProportion=0.1)
    error = -1
    for i in range(150):
        new_error = trainer.train()
        print "error: " + str(new_error)
        if abs(error - new_error) < 0.005:
            break
        error = new_error

    # save the network
    print "Saving neural network..."
    NetworkWriter.writeToFile(net, os.path.basename(dirname) + "designnet")
Example #35
def buildNetwork(N):
    dimension = WINDOW_SIZE
    inLayer = LinearLayer(dimension)
    hiddenLayer = SigmoidLayer(N)
    outLayer = LinearLayer(dimension)
    # bias disabled, too much over training
    #bias = BiasUnit(name='bias')
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    #bias_to_out = FullConnection(bias, outLayer)
    #bias_to_hidden = FullConnection(bias, hiddenLayer)

    net = RecurrentNetwork()
    #net.addModule(bias)
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)
    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_out)
    net.addRecurrentConnection(FullConnection(hiddenLayer, hiddenLayer))
    #net.addConnection(bias_to_hidden)
    #net.addConnection(bias_to_out)
    net.sortModules()
    return net
Example #36
def runNeuralLearningCurveSimulation(dataTrain, dataTest, train_tfidf,
                                     test_tfidf, outFile):
    print 'running neural learning curve'
    outFile.write('-------------------------------------\n')
    outFile.write('train==> %d, %d \n' %
                  (train_tfidf.shape[0], train_tfidf.shape[1]))
    outFile.write('test==>  %d, %d \n' %
                  (test_tfidf.shape[0], test_tfidf.shape[1]))

    trainDS = getDataSetFromTfidf(train_tfidf, dataTrain.target)
    testDS = getDataSetFromTfidf(test_tfidf, dataTest.target)

    print "Number of training patterns: ", len(trainDS)
    print "Input and output dimensions: ", trainDS.indim, trainDS.outdim
    print "First sample (input, target, class):"
    print len(trainDS['input'][0]), trainDS['target'][0], trainDS['class'][0]
    '''
    with SimpleTimer('time to train', outFile):
        net = buildNetwork(trainDS.indim, trainDS.indim/2, trainDS.indim/4, trainDS.indim/8, trainDS.indim/16, 2, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
        trainer = BackpropTrainer( net, dataset=trainDS, momentum=0.1, verbose=True, weightdecay=0.01, batchlearning=True)
    '''
    net = RecurrentNetwork()
    net.addInputModule(LinearLayer(trainDS.indim, name='in'))
    net.addModule(SigmoidLayer(trainDS.indim / 2, name='hidden'))
    net.addModule(SigmoidLayer(trainDS.indim / 4, name='hidden2'))
    net.addOutputModule(SoftmaxLayer(2, name='out'))
    net.addConnection(FullConnection(net['in'], net['hidden'], name='c1'))
    net.addConnection(FullConnection(net['hidden'], net['out'], name='c2'))
    net.addRecurrentConnection(
        FullConnection(net['hidden'], net['hidden'], name='c3'))
    net.addRecurrentConnection(
        FullConnection(net['hidden2'], net['hidden'], name='c4'))
    net.sortModules()
    trainer = BackpropTrainer(net,
                              dataset=trainDS,
                              momentum=0.01,
                              verbose=True,
                              weightdecay=0.01)

    outFile.write('%s \n' % (net.__str__()))
    epochs = 200
    with SimpleTimer('time to train %d epochs' % epochs, outFile):
        for i in range(epochs):
            trainer.trainEpochs(1)
            trnresult = percentError(trainer.testOnClassData(),
                                     trainDS['class'])
            tstresult = percentError(trainer.testOnClassData(dataset=testDS),
                                     testDS['class'])

            print "epoch: %4d" % trainer.totalepochs, \
                  "  train error: %5.2f%%" % trnresult, \
                  "  test error: %5.2f%%" % tstresult

    outFile.write('%5.2f , %5.2f \n' % (100.0 - trnresult, 100.0 - tstresult))

    predicted = trainer.testOnClassData(dataset=testDS)
    results = predicted == testDS['class'].flatten()
    wrong = []
    for i in range(len(results)):
        if not results[i]:
            wrong.append(i)
    print 'classifier got these wrong:'
    for i in wrong[:10]:
        print dataTest.data[i], dataTest.target[i]
        outFile.write('%s %d \n' % (dataTest.data[i], dataTest.target[i]))
Example #37
def buildMinimalLSTMNetwork():
    N = RecurrentNetwork('simpleLstmNet')  
    i = LinearLayer(4, name='i')
    h = LSTMLayer(1, peepholes=True, name='lstm')
    o = LinearLayer(1, name='o')
    N.addInputModule(i)
    N.addModule(h)
    N.addOutputModule(o)
    N.addConnection(IdentityConnection(i, h))
    N.addConnection(IdentityConnection(h, o))
    N.sortModules()
    return N
Example #38
n.addOutputModule(outLayer)

#   Full Connection class - add connections/synapses

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)
# sortModules() makes our MLP usable
n.sortModules()
print n.activate([1, 2])

print n

# RecurrentNetwork - recurrent connections look back in time one timestep.
n = RecurrentNetwork()
n.addInputModule(LinearLayer(2, name='in'))
n.addModule(SigmoidLayer(3, name='hidden'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
n.addRecurrentConnection(FullConnection(n['hidden'], n['hidden'], name='c3'))

n.sortModules()
print n.activate((2, 2))
print n.activate((2, 2))
print n.activate((2, 2))
n.reset()
print n.activate((2, 2))

#######################################
#########   Classification with feed forward networks
Example #39
all weights of the network at once. """

print(hidden2out.params)
print(n.params)

""" The former are the last slice of the latter. """

print(n.params[-3:] == hidden2out.params)

""" Ok, after having covered the basics, let's move on to some additional concepts.
First of all, we encourage you to name all modules, or connections you create, because that gives you
more readable printouts, and a very concise way of accessing them.

We now build an equivalent network to the one before, but with a more concise syntax:
"""
n2 = RecurrentNetwork(name='net2')
n2.addInputModule(LinearLayer(2, name='in'))
n2.addModule(SigmoidLayer(3, name='h'))
n2.addOutputModule(LinearLayer(1, name='out'))
n2.addConnection(FullConnection(n2['in'], n2['h'], name='c1'))
n2.addConnection(FullConnection(n2['h'], n2['out'], name='c2'))
n2.sortModules()

""" Printouts look more concise and readable: """
print(n2)

""" There is an even quicker way to build networks though, as long as their structure is nothing
more fancy than a stack of fully connected layers: """

n3 = buildNetwork(2, 3, 1, bias=False)
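
""" As a quick sanity check (a sketch; the 2-3-1 shape follows the tutorial),
the shortcut network can be activated directly: """

print(n3.activate([1, 2]))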
Example #40
from pybrain.structure import FeedForwardNetwork, RecurrentNetwork, LinearLayer, SigmoidLayer, FullConnection

####
#RECURRENT NETWORK
####

n = RecurrentNetwork()

inLayer = LinearLayer(inputVector, name='inputLayer')
hiddenLayerA = SigmoidLayer(hiddenVector, name='hiddenLayerA')
hiddenLayerB = SigmoidLayer(hiddenVector, name='hiddenLayerB')
outputLayer = LinearLayer(outputVector, name='outputLayer')

n.addInputModule(inLayer)
n.addModule(hiddenLayerA)
n.addModule(hiddenLayerB)
n.addOutputModule(outputLayer)

n.addConnection(FullConnection(n['inputLayer'], n['hiddenLayerA'], name='c1'))
n.addConnection(FullConnection(n['hiddenLayerA'], n['hiddenLayerB'], name='c2'))
n.addConnection(FullConnection(n['hiddenLayerB'], n['outputLayer'], name='c3'))

n.addRecurrentConnection(FullConnection(n['hiddenLayerA'], n['hiddenLayerB'], name='rec3'))

n.sortModules()
print 'Network One (Recurrent)' + str(n.activate([1,2,3]))
print 'Network One (Recurrent)' + str(n.activate([1,2,3]))

####
#FEED FORWARD NETWORK
####
Example #41
def exec_algo(xml_file, output_location):
        rootObj=ml.parse(xml_file)
        file_name=rootObj.MachineLearning.prediction.datafile
        file=open(file_name)
        var_input=rootObj.MachineLearning.prediction.input
        var_output=rootObj.MachineLearning.prediction.output
        var_classes=rootObj.MachineLearning.prediction.classes

        DS=ClassificationDataSet(var_input,var_output,nb_classes=var_classes)
        #DS1=ClassificationDataSet(13,1,nb_classes=10)

        for line in file.readlines():
                data=[float(x) for x in line.strip().split(',') if x != '']
                inp=tuple(data[:var_input])
                output=tuple(data[var_input:])
                DS.addSample(inp,output)

        tstdata,trndata=DS.splitWithProportion(0)
        #trndatatest,tstdatatest=DS1.splitWithProportion(0)

        trdata=ClassificationDataSet(trndata.indim,1,nb_classes=10)
        #tsdata=ClassificationDataSet(DS1.indim,1,nb_classes=10)
        #tsdata1=ClassificationDataSet(DS1.indim,1,nb_classes=10)

        for i in xrange(trndata.getLength()):
                if (trndata.getSample(i)[1][0]!=100):
                        trdata.addSample(trndata.getSample(i)[0],trndata.getSample(i)[1])

        trdata._convertToOneOfMany()
        #tsdata._convertToOneOfMany()
        #tsdata1._convertToOneOfMany()
        print "%d" % (trdata.getLength())

        rnn=RecurrentNetwork()
        inputLayer=LinearLayer(trdata.indim)

        hiddenLayer=rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.hiddenLayerActivation
        hiddenNeurons=rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.hiddenNeurons

        if  hiddenLayer=='Sigmoid':
                hiddenLayer=SigmoidLayer(hiddenNeurons)
        elif hiddenLayer=='Softmax':
                hiddenLayer=SoftmaxLayer(hiddenNeurons)
        else:
                hiddenLayer=LinearLayer(hiddenNeurons)

        outputLayer=rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.outputLayerActivation

        if  outputLayer=='Sigmoid':
               outputLayer=SigmoidLayer(trdata.outdim)
        elif outputLayer=='Softmax':
                outputLayer=SoftmaxLayer(trdata.outdim)
        else:
                outputLayer=LinearLayer(trdata.outdim)

        rnn.addInputModule(inputLayer)
        rnn.addModule(hiddenLayer)
        rnn.addOutputModule(outputLayer)
        rnn_type=rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.RNN_Type
        in_to_hidden=FullConnection(inputLayer,hiddenLayer)
        hidden_to_outputLayer=FullConnection(hiddenLayer,outputLayer)
        rnn.addConnection(in_to_hidden)
        rnn.addConnection(hidden_to_outputLayer)

        if rnn_type=='Elman':
                hidden_to_hidden=FullConnection(hiddenLayer,hiddenLayer, name='c3')
                rnn.addRecurrentConnection(hidden_to_hidden)
        #hidden_to_hidden=FullConnection(hiddenLayer,hiddenLayer, name='c3')

        if rnn_type=='Jordan':
                output_to_hidden=FullConnection(outputLayer,hiddenLayer, name='c3')
                rnn.addRecurrentConnection(output_to_hidden)
                


        #rnn.addRecurrentConnection(hidden_to_hidden)
        momentum=rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.momentum
        weightdecay=rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.learningRate
        rnn.sortModules()
        trainer=BackpropTrainer(rnn,dataset=trdata,momentum=0.1,verbose=True,weightdecay=0.01)
        trainer.train()
        result=(percentError(trainer.testOnClassData(dataset=trdata),trdata['class']))
        #result1=percentError(trainer.testOnClassData(dataset=tsdata1),tsdata1['class'])

        print ('%f \n') % (100-result)
        #print ('%f \n') % (100-result1)

        ts=time.time()
        directory = output_location + sep + str(int(ts))
        makedirs(directory)
        fileObject=open(output_location + sep + str(int(ts)) + sep + 'pybrain_RNN','w')
        pickle.dump(trainer,fileObject)
        pickle.dump(rnn,fileObject)
        fileObject.close()
Example #42
#inLayer = LinearLayer(ds.indim)
#hiddenLayer = SigmoidLayer(5)
#outLayer = SoftmaxLayer(ds.outdim)
#net.addInputModule(inLayer)
#net.addModule(hiddenLayer)
#net.addOutputModule(outLayer)
#from pybrain.structure import FullConnection
#in_to_hidden = FullConnection(inLayer, hiddenLayer)
#hidden_to_out = FullConnection(hiddenLayer, outLayer)
#net.addConnection(in_to_hidden)
#net.addConnection(hidden_to_out)
#net.sortModules()
#net = buildNetwork(ds.indim, 2, ds.outdim, outclass=SoftmaxLayer)

from pybrain.structure import RecurrentNetwork
net = RecurrentNetwork()
net.addInputModule(LinearLayer(ds.indim, name='inLayer'))
net.addModule(SigmoidLayer(ds.indim, name='hiddenLayer'))
net.addOutputModule(SoftmaxLayer(ds.outdim, name='outLayer'))
net.addConnection(FullConnection(net['inLayer'], net['hiddenLayer'], name='in_to_hidden'))
net.addConnection(FullConnection(net['hiddenLayer'], net['outLayer'], name='hidden_to_out'))
net.addRecurrentConnection(FullConnection(net['hiddenLayer'], net['hiddenLayer'], name='hidden_to_hidden'))
net.sortModules()

#Train net
from pybrain.supervised.trainers import BackpropTrainer
trainer = BackpropTrainer(net, ds, momentum=0.1, verbose=True, weightdecay=0.01)

#for i in range(10):
#    if i%20==0:
#        print i
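# A sketch of the elided training loop (assumes net, ds and trainer from above):
for i in range(10):
    err = trainer.train()
    if i % 2 == 0:
        print("epoch %d, error %f" % (i, err))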
Example #43
def buildMixedNestedNetwork():
    """ build a nested network with the inner one being a ffn and the outer one being recurrent. """
    N = RecurrentNetwork('outer')
    a = LinearLayer(1, name = 'a')
    b = LinearLayer(2, name = 'b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a,b))
    N.addConnection(FullConnection(b,c))
    N.addRecurrentConnection(FullConnection(c,c))
    N.sortModules()
    return N
Example #44
class LanguageLearner:

	__OUTPUT = "Sample at {0} epochs (prompt=\"{1}\", length={2}): {3}"

	def __init__(self, trainingText, hiddenLayers, hiddenNodes):
		self.__initialized = False
		with open(trainingText) as f:
			self.raw = f.read()
		self.characters = list(self.raw)
		self.rawData = list(map(ord, self.characters))
		print("Creating alphabet mapping...")
		self.mapping = []
		for charCode in self.rawData:
			if charCode not in self.mapping:
				self.mapping.append(charCode)
		print("Mapping of " + str(len(self.mapping)) + " created.")
		print(str(self.mapping))
		print("Converting data to mapping...")
		self.data = []
		for charCode in self.rawData:
			self.data.append(self.mapping.index(charCode))
		print("Done.")
		self.dataIn = self.data[:-1:]
		self.dataOut = self.data[1::]
		self.inputs = 1
		self.hiddenLayers = hiddenLayers
		self.hiddenNodes = hiddenNodes
		self.outputs = 1

	def initialize(self, verbose):
		print("Initializing language learner...")
		self.verbose = verbose

		# Create network and modules
		self.net = RecurrentNetwork()
		inp = LinearLayer(self.inputs, name="in")
		hiddenModules = []
		for i in range(0, self.hiddenLayers):
			hiddenModules.append(LSTMLayer(self.hiddenNodes, name=("hidden-" + str(i + 1))))
		outp = LinearLayer(self.outputs, name="out")

		# Add modules to the network with recurrence
		self.net.addOutputModule(outp)
		self.net.addInputModule(inp)
		
		for module in hiddenModules:
			self.net.addModule(module)

		# Create connections

		self.net.addConnection(FullConnection(self.net["in"], self.net["hidden-1"]))
		for i in range(0, len(hiddenModules) - 1):
			self.net.addConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 2)]))
			self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 1)]))
		self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))],
			self.net["hidden-" + str(len(hiddenModules))]))
		self.net.addConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))], self.net["out"]))
		self.net.sortModules()

		self.trainingSet = SequentialDataSet(self.inputs, self.outputs)
		for x, y in zip(self.dataIn, self.dataOut):
			self.trainingSet.newSequence()
			self.trainingSet.appendLinked([x], [y])

		self.net.randomize()

		print("Neural network initialzed with structure:")
		print(self.net)

		self.trainer = BackpropTrainer(self.net, self.trainingSet, verbose=verbose)
		self.__initialized = True
		print("Successfully initialized network.")

	def train(self, epochs, frequency, prompt, length):
		if not self.__initialized:
			raise Exception("Attempted to train uninitialized LanguageLearner")
		print ("Beginning training for " + str(epochs) + " epochs...")
		if frequency >= 0:
			print(LanguageLearner.__OUTPUT.format(0, prompt, length, self.sample(prompt, length)))
		for i in range(1, epochs):
			print("Error at " + str(i) + " epochs: " + str(self.trainer.train()))
			if frequency > 0 and i % frequency == 0:
				print(LanguageLearner.__OUTPUT.format(i, prompt, length, self.sample(prompt, length)))
		print("Completed training.")

	def sample(self, prompt, length):
		self.net.reset()
		if prompt is None:
			prompt = chr(random.choice(self.mapping))
		output = prompt
		charCode = ord(prompt)
		for i in range(0, length):
			sampledResult = self.net.activate([charCode])
			charCode = int(round(sampledResult[0]))
			if charCode < 0 or charCode >= len(self.mapping):
				return output + "#TERMINATED_SAMPLE(reason: learner guessed invalid character)"
			output += chr(self.mapping[charCode])
		return output
Example #45
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
Example #46
			intuple.append(int(item))
		for item in sentence[0]:
			if int(item) < 1:
				outtuple.append(-1)
			else:
				outtuple.append(int(item))
		print intuple, outtuple
		result = rnet.activate(intuple)
	if result != sentence[0]:
		print "FAIL:",sentence
		exit(0)

#basically main
if __name__ == "__main__":
	#Initialize the ANN
	rnet = RecurrentNetwork()

	rnet.addInputModule(LinearLayer(8, name="word_hash"))
	rnet.addInputModule(LinearLayer(6, name="word_type"))
	rnet.addModule(LSTMLayer(6, name="hidden"))
	rnet.addOutputModule(TanhLayer(6, name="out"))

	rnet.addConnection(FullConnection(rnet["word_type"], rnet["hidden"]))
	rnet.addRecurrentConnection(FullConnection(rnet["hidden"], rnet["hidden"]))
	rnet.addConnection(FullConnection(rnet["hidden"], rnet["out"]))
	rnet.addConnection(FullConnection(rnet["word_hash"], rnet["out"]))

	rnet.sortModules()
	
	#Initialize the dataset
	sds = SequenceClassificationDataSet(14,6)
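
The snippet is cut off before the dataset is populated; a typical continuation (a sketch, where `sentences` is a hypothetical iterable and `intuple`/`outtuple` are built per word as in the fragment above) would be:

for sentence in sentences:
    sds.newSequence()
    sds.appendLinked(intuple, outtuple)  # 14 inputs, 6 targets per step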
Exemple #47
0
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
Exemple #48
0
import numpy as np

from pybrain.datasets import SupervisedDataSet
from pybrain.structure import RecurrentNetwork, LinearLayer, LSTMLayer, SoftmaxLayer
from pybrain.structure import FullConnection
from pybrain.supervised.trainers import BackpropTrainer

# FEXT is the project's feature-extraction helper (freqToMel/melToFreq);
# it is defined elsewhere in the original source.

duration = 1

num_coeff = 26
max_freq = 8000
min_freq = 0
melArray = np.linspace(FEXT.freqToMel(min_freq), FEXT.freqToMel(max_freq),
                       num_coeff + 2)
freqArray = FEXT.melToFreq(melArray)
freqArray_bin = np.floor(513 * freqArray / 16000)
centralPoints = freqArray_bin[1:21]
freqbank = np.zeros((26, 257))

LSTMre = RecurrentNetwork()

LSTMre.addInputModule(LinearLayer(39, name='input'))
LSTMre.addModule(LSTMLayer(50, name='LSTM_hidden'))
LSTMre.addOutputModule(SoftmaxLayer(5, name='out'))
LSTMre.addConnection(
    FullConnection(LSTMre['input'], LSTMre['LSTM_hidden'], name='c1'))
LSTMre.addConnection(
    FullConnection(LSTMre['LSTM_hidden'], LSTMre['out'], name='c2'))
LSTMre.addRecurrentConnection(
    FullConnection(LSTMre['LSTM_hidden'], LSTMre['LSTM_hidden'], name='c3'))
LSTMre.sortModules()
ds = SupervisedDataSet(39, 5)

#ser.
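
The fragment ends before the dataset is filled or the network trained; a plausible continuation (a sketch: `mfcc_frames` and `labels` are hypothetical, and a SequentialDataSet would normally suit a recurrent net better than the SupervisedDataSet used here) is:

for frame, label in zip(mfcc_frames, labels):
    ds.addSample(frame, label)  # 39 MFCC-style features in, 5 classes out
trainer = BackpropTrainer(LSTMre, ds, learningrate=0.01)
trainer.trainEpochs(10)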
Exemple #50
0
def buildParityNet():
    net = RecurrentNetwork()
    net.addInputModule(LinearLayer(1, name = 'i'))
    net.addModule(TanhLayer(2, name = 'h'))
    net.addModule(BiasUnit('bias'))
    net.addOutputModule(TanhLayer(1, name = 'o'))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['o']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.addRecurrentConnection(FullConnection(net['o'], net['h']))
    net.sortModules()

    p = net.params
    p[:] = [-0.5, -1.5, 1, 1, -1, 1, 1, -1, 1]
    p *= 10.

    return net
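
The hand-set parameters make this a working parity network; a quick check (a sketch; the input coding the weights were tuned for, 0/1 versus -1/1, is assumed here to be -1/1) is to stream bits through it:

net = buildParityNet()
net.reset()
for bit in [1, -1, 1, 1]:
    print(net.activate([bit]))  # sign of the output tracks running parity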
Exemple #52
0
def getNetwork(trndata):
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(trndata.indim, name='in'))
    n.addModule(SigmoidLayer(100, name='hidden'))
    n.addOutputModule(LinearLayer(trndata.outdim, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(
        FullConnection(n['hidden'], n['hidden'], name='c3'))
    n.sortModules()

    # fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
    trainer = BackpropTrainer(n,
                              dataset=trndata,
                              momentum=0.1,
                              verbose=True,
                              weightdecay=0.01)

    # TODO: return network and trainer here. Make another function for training
    # for i in range(20):
    # trainer.trainEpochs(1)
    # trainer.trainUntilConvergence(maxEpochs=100)

    # trnresult = percentError( trainer.testOnClassData(),trndata['class'] )
    # tstresult = percentError( trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )

    # print "epoch: %4d" % trainer.totalepochs, \
    # 	"  train error: %5.2f%%" % trnresult

    # out = fnn.activateOnDataset(tstdata)
    # out = out.argmax(axis=1)  # the highest output activation gives the class
    return (n, trainer)
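
Per the TODO in the snippet, training is meant to happen in a separate step; a minimal caller (a sketch) would be:

net, trainer = getNetwork(trndata)
for epoch in range(20):
    print("epoch %d, train error %f" % (epoch, trainer.train()))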
Exemple #53
0
all weights of the network at once. """

print hidden2out.params
print n.params

""" The former are the last slice of the latter. """

print n.params[-3:] == hidden2out.params

""" Ok, after having covered the basics, let's move on to some additional concepts.
First of all, we encourage you to name all modules, or connections you create, because that gives you
more readable printouts, and a very concise way of accessing them.

We now build an equivalent network to the one before, but with a more concise syntax:
"""
n2 = RecurrentNetwork(name='net2')
n2.addInputModule(LinearLayer(2, name='in'))
n2.addModule(SigmoidLayer(3, name='h'))
n2.addOutputModule(LinearLayer(1, name='out'))
n2.addConnection(FullConnection(n2['in'], n2['h'], name='c1'))
n2.addConnection(FullConnection(n2['h'], n2['out'], name='c2'))
n2.sortModules()

""" Printouts look more concise and readable: """
print n2

""" There is an even quicker way to build networks though, as long as their structure is nothing
more fancy than a stack of fully connected layers: """

n3 = buildNetwork(2, 3, 1, bias=False)
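
""" Such a network is ready to use right away; a quick sanity check (a sketch) is a single forward pass: """

print n3.activate([1, 2])  # forward pass through the 2-3-1 stack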
Exemple #54
0
def buildSimpleLSTMNetwork(peepholes = False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(100, name = 'i')
    h = LSTMLayer(10, peepholes = peepholes, name = 'lstm')
    o = LinearLayer(1, name = 'o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name = 'f1'))
    N.addConnection(FullConnection(b, h, name = 'f2'))
    N.addRecurrentConnection(FullConnection(h, h, name = 'r1'))
    N.addConnection(FullConnection(h, o, name = 'f3'))
    N.sortModules()
    return N
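
A usage note (a sketch): the returned net maps 100 inputs to a single output, and reset() should be called between independent sequences so the LSTM state does not leak across them:

N = buildSimpleLSTMNetwork()
N.reset()
out = N.activate([0.0] * 100)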
Exemple #55
0
from pybrain.rl.environments.timeseries.maximizereturntask import MaximizeReturnTask
from pybrain.rl.environments.timeseries.timeseries import MonthlySnPEnvironment
from pybrain.rl.learners.directsearch.rrl import RRL

from pybrain.structure import RecurrentNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, BiasUnit
from pybrain.structure import FullConnection
from pybrain.rl.agents import LearningAgent
from pybrain.rl.experiments import EpisodicExperiment

from numpy import sign, round
from matplotlib import pyplot

net = RecurrentNetwork()
# Single linear layer with bias unit, and a single tanh layer; the
# linear layer's weights are what gets optimised.
net.addInputModule(BiasUnit(name='bias'))
net.addOutputModule(TanhLayer(1, name='out'))
net.addRecurrentConnection(FullConnection(net['out'], net['out'], name='c3'))
net.addInputModule(LinearLayer(1, name='in'))
net.addConnection(FullConnection(net['in'], net['out'], name='c1'))
net.addConnection(FullConnection(net['bias'], net['out'], name='c2'))
net.sortModules()
net._setParameters([-8.79227886e-02, -8.29319017e+02, 1.25946474e+00])
print(net._params)
env = MonthlySnPEnvironment()
task = MaximizeReturnTask(env)
learner = RRL()  # ENAC() #Q_LinFA(2,1)
agent = LearningAgent(net, learner)
exp = EpisodicExperiment(task, agent)

exp.doEpisodes(10)
Exemple #56
0
def buildNonGravityNet(recurrent = False):
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    l1 = LinearLayer(2)
    l2 = LinearLayer(3)
    s1 = SigmoidLayer(2)
    l3 = LinearLayer(1)
    net.addInputModule(l1)
    net.addModule(l2)
    net.addModule(s1)
    net.addOutputModule(l3)
    net.addConnection(IdentityConnection(l1, l2, outSliceFrom = 1))
    net.addConnection(IdentityConnection(l1, l2, outSliceTo = 2))
    net.addConnection(IdentityConnection(l2, l3, inSliceFrom = 2))
    net.addConnection(IdentityConnection(l2, l3, inSliceTo = 1))
    net.addConnection(IdentityConnection(l1, s1))
    net.addConnection(IdentityConnection(l2, s1, inSliceFrom = 1))
    net.addConnection(IdentityConnection(s1, l3, inSliceFrom = 1))
    if recurrent:
        net.addRecurrentConnection(IdentityConnection(s1, l1))
        net.addRecurrentConnection(IdentityConnection(l2, l2, inSliceFrom = 1, outSliceTo = 2))
    net.sortModules()
    return net
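
The sliced IdentityConnections are the point of this example: each copies only a sub-range of units rather than the whole layer (e.g. outSliceFrom = 1 writes l1's two outputs into units 1..2 of l2 instead of 0..1). A quick activation (a sketch) shows the net is wired and runnable:

net = buildNonGravityNet(recurrent=False)
print(net.activate([0.5, -0.25]))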
Exemple #57
0
Xmax=np.asarray([x*1.0 for x in np.amax(X,axis=0)])
#X = (X-Xmin)/(Xmax-Xmin)
X=X/Xmax
Ymin=np.asarray([x*1.0 for x in np.amin(Y,axis=0)])
Ymax=np.asarray([x*1.0 for x in np.amax(Y,axis=0)])
Y = (Y-Ymin)/(Ymax-Ymin)

from pybrain.datasets import SupervisedDataSet
DS = SupervisedDataSet(4, 1)
for i in range(0, Y.size):
        DS.addSample((X[i][0], X[i][1], X[i][2], X[i][3]), (float(Y[i]),))

## ----------------------- ANN ---------------------------- ##

from pybrain.structure import RecurrentNetwork
n = RecurrentNetwork()

from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.structure import FullConnection

n.addInputModule(SigmoidLayer(4, name='in'))
n.addModule(SigmoidLayer(3, name='hidden'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

n.sortModules() #initialisation


## ----------------------- Trainer ---------------------------- ##
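
The trainer section is cut off here; a minimal continuation (a sketch, assuming plain backprop over the DS built above) would be:

from pybrain.supervised.trainers import BackpropTrainer

trainer = BackpropTrainer(n, DS, learningrate=0.01, momentum=0.9)
for epoch in range(100):
    err = trainer.train()  # one pass over DS; returns the epoch's error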
Exemple #58
0
def testTraining():
    # the AnBnCn dataset (sequential)
    d = AnBnCnDataSet()

    # build a recurrent network to be trained
    hsize = 2
    n = RecurrentNetwork()
    n.addModule(TanhLayer(hsize, name='h'))
    n.addModule(BiasUnit(name='bias'))
    n.addOutputModule(LinearLayer(1, name='out'))
    n.addConnection(FullConnection(n['bias'], n['h']))
    n.addConnection(FullConnection(n['h'], n['out']))
    n.addRecurrentConnection(FullConnection(n['h'], n['h']))
    n.sortModules()

    # initialize the backprop trainer and train
    t = BackpropTrainer(n, learningrate=0.1, momentum=0.0, verbose=True)
    t.trainOnDataset(d, 200)

    # the resulting weights are in the network:
    print('Final weights:', n.params)