Example #1
    def buildNN(self, net, functions, inp, out):
        # assumes: from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
        # net       -- per-layer description of the network
        # functions -- activation keys, looked up in self.func to get layer classes
        # inp, out  -- sizes of the input and output layers
        layers = []

        # input and output layers use the first and last activation functions
        inLayer = self.func[functions[0]](inp)
        layers.append(inLayer)
        outLayer = self.func[functions[-1]](out)

        # one single-neuron hidden module per inner entry of the description
        for neural in range(1, len(net) - 1):
            layers.append(self.func[functions[neural]](1))
        layers.append(outLayer)

        # fillConnections is expected to return the forward and recurrent connection lists
        connections, recConnections = self.fillConnections(net, [], [0], layers)

        # only a RecurrentNetwork can hold recurrent connections
        if len(recConnections) == 0:
            n = FeedForwardNetwork()
        else:
            n = RecurrentNetwork()

        n.addInputModule(inLayer)
        for layer in range(1, len(layers) - 1):
            n.addModule(layers[layer])
        n.addOutputModule(outLayer)

        for con in connections:
            n.addConnection(con)
        for rcon in recConnections:
            n.addRecurrentConnection(rcon)

        # sortModules() finalises the topology and makes the network usable
        n.sortModules()
        return n
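The core decision in the method above is the choice of network class: PyBrain only accepts recurrent connections on a RecurrentNetwork, so the builder falls back to a plain FeedForwardNetwork when fillConnections produces none. The following standalone sketch shows that pattern with hand-written connection lists standing in for whatever fillConnections would return (the variable names and the 2-3-1 layer sizes are chosen only for illustration):

from pybrain.structure import (FeedForwardNetwork, RecurrentNetwork,
                               LinearLayer, SigmoidLayer, FullConnection)

inLayer, hidden, outLayer = LinearLayer(2), SigmoidLayer(3), LinearLayer(1)
connections = [FullConnection(inLayer, hidden), FullConnection(hidden, outLayer)]
recConnections = [FullConnection(hidden, hidden)]  # leave empty for a pure feedforward net

# same selection logic as buildNN above
n = RecurrentNetwork() if recConnections else FeedForwardNetwork()
n.addInputModule(inLayer)
n.addModule(hidden)
n.addOutputModule(outLayer)
for con in connections:
    n.addConnection(con)
for rcon in recConnections:
    n.addRecurrentConnection(rcon)
n.sortModules()
print n.activate((2, 2))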
Example #2
# setup reconstructed from the PyBrain tutorial so the snippet runs on its own;
# the 2-3-1 layer sizes match the recurrent example further down
from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, FullConnection

n = FeedForwardNetwork()
inLayer = LinearLayer(2)
hiddenLayer = SigmoidLayer(3)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

# this is required to make the MLP usable 
n.sortModules()

print n.activate((2,2)) # forward pass
print 'n.params\n', n.params # all weights
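# (note, not in the original example) a FullConnection between layers of sizes a and b
# holds a*b weights, so for this 2-3-1 net n.params concatenates 2*3 + 3*1 = 9 values
print len(n.params) == len(in_to_hidden.params) + len(hidden_to_out.params)  # True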

# same but for recurrent network
n = RecurrentNetwork()
n.addInputModule(LinearLayer(2, name='in'))
n.addModule(SigmoidLayer(3, name='hidden'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
n.addRecurrentConnection(FullConnection(n['hidden'], n['hidden'], name='c3'))

n.sortModules()


print n.activate((2,2)) # forward pass
print n.activate((2,2)) # forward pass
print n.activate((2,2)) # forward pass
n.reset()  # forget the accumulated state
print 'after reset'
print n.activate((2,2)) # forward pass
print in_to_hidden.params
print hidden_to_out.params
print n.params
print n.activate([1, 2])

# Naming your NN
print LinearLayer(2).name               # auto-generated name, e.g. 'LinearLayer-3'
print LinearLayer(2, name='foo').name   # explicitly assigned name: 'foo'

# Using Recurrent NN
n = RecurrentNetwork()

n.addInputModule(LinearLayer(2, name='in'))
n.addModule(SigmoidLayer(3, name='hidden'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

# Looks back in time one timestep
n.addRecurrentConnection(FullConnection(n['hidden'], n['hidden'], name='c3'))

# With an RNN the hidden state persists between calls, so repeated
# activations of the same input give different outputs
n.sortModules()
print n.activate([2, 2])
print n.activate([2, 2])
print n.activate([2, 2])
n.reset()  # clear the network's history so activation starts over
print n.activate([2, 2])
print n.activate([2, 2])
Example #4
# print n.activate([1,2])

# print weights
# print in_to_hidden.params

""" -- RECURRENT NETWORK -- """
from pybrain.structure import RecurrentNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, FullConnection

n = RecurrentNetwork()

n.addInputModule(LinearLayer(2, name="in"))
n.addModule(SigmoidLayer(3, name="hidden"))
n.addOutputModule(LinearLayer(1, name="out"))
n.addConnection(FullConnection(n["in"], n["hidden"], name="c1"))
n.addConnection(FullConnection(n["hidden"], n["out"], name="c2"))

n.addRecurrentConnection(FullConnection(n["hidden"], n["hidden"], name="c3"))

n.sortModules()

# the result is different every time
# print n.activate((2,2))
# print n.activate((2,2))
# print n.activate((2,2))

# goes back to its original initialization state
n.reset()
# print n.activate((2,2))
# print n.activate((2,2))
# print n.activate((2,2))
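To make the comment above concrete: reset() puts the recurrent network back in its initial state, so replaying the same input sequence reproduces the same output sequence. A small check, continuing with the n built above (the loop and variable names are only for illustration):

first = [n.activate((2, 2)).copy() for _ in range(3)]
n.reset()
second = [n.activate((2, 2)).copy() for _ in range(3)]

# each pair prints the same value
for a, b in zip(first, second):
    print a, b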