# Imports assumed by the PyBrain snippets below (classic PyBrain package layout).
import scipy
from pybrain.structure.networks import FeedForwardNetwork, RecurrentNetwork
from pybrain.structure.modules import LinearLayer, TanhLayer, LSTMLayer, MDLSTMLayer, BiasUnit
from pybrain.structure.connections import FullConnection, IdentityConnection
from pybrain.structure.connections.shared import MotherConnection, SharedFullConnection
from pybrain.structure.modulemesh import ModuleMesh
from pybrain.structure.networks.borderswiping import BorderSwipingNetwork
from pybrain.tools.shortcuts import buildNetwork

def buildSharedCrossedNetwork():
    """ build a network with shared connections. Two hidden modules are symmetrically linked,
    but each reads from a different input neuron than the output neuron it feeds. The shared
    weights are set to fixed values (1 and 2) below. """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name='a')
    b = LinearLayer(h, name='b')
    c = LinearLayer(h, name='c')
    d = LinearLayer(2, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)

    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1, ))

    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2, ))

    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo=1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo=1))
    N.sortModules()
    return N
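A minimal usage sketch (assuming the imports above): all layers are linear and the shared weights are fixed to 1 and 2, so the network swaps its two inputs and doubles them.

n = buildSharedCrossedNetwork()
print(n.activate([1., 2.]))   # expected: [4., 2.]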
Example #2
def buildSimpleBorderSwipingNet(size = 3, dim = 3, hsize = 1, predefined = {}):
    """ build a simple swiping network,of given size and dimension, using linear inputs and output"""
    # assuming identical size in all dimensions
    dims = tuple([size]*dim)
    # also includes one dimension for the swipes
    hdims = tuple(list(dims)+[2**dim])
    inmod = LinearLayer(size**dim, name = 'input')
    inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')
    outmod = LinearLayer(size**dim, name = 'output')
    outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')
    hiddenmesh = ModuleMesh.constructWithLayers(TanhLayer, hsize, hdims, 'hidden')
    return BorderSwipingNetwork(inmesh, hiddenmesh, outmesh, predefined = predefined)
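A minimal usage sketch (hedged, assuming the imports above): with the defaults size=3 and dim=3, the 27 inputs are mapped to 27 outputs through the border-swiping hidden mesh.

net = buildSimpleBorderSwipingNet()
out = net.activate([0.5] * 27)
print(len(out))   # 27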
def buildSlicedNetwork():
    """ build a network with shared connections. Two hiddne modules are symetrically linked, but to a different 
    input neuron than the output neuron. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name = 'a')
    b = LinearLayer(2, name = 'b')
    N.addInputModule(a)
    N.addOutputModule(b)
    
    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
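A minimal usage sketch: because the two sliced connections are crossed, each output neuron depends only on the opposite input neuron, and the two random weights are the network's only parameters.

n = buildSlicedNetwork()
print(n.params)               # two weights, one per sliced connection
print(n.activate([1., 0.]))   # only the second output is non-zero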
Example #4
    def _buildCaptureNetwork(self):
        # the input is a 2D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.insize * self.size * self.size, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.size, self.size),
                                            'inmesh')

        # the output is a 2D-mesh (as a view on a flat sigmoid output layer)
        outmod = self.outcomponentclass(self.outputs * self.size * self.size,
                                        name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.size, self.size),
                                             'outmesh')

        if self.componentclass is MDLSTMLayer:
            c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
            hiddenmesh = ModuleMesh(c, (self.size, self.size, 4),
                                    'hidden',
                                    baserename=True)
        else:
            hiddenmesh = ModuleMesh.constructWithLayers(
                self.componentclass, self.hsize, (self.size, self.size, 4),
                'hidden')

        self._buildBorderStructure(inmesh, hiddenmesh, outmesh)

        # add the identity connections for the states
        for m in self.modules:
            if isinstance(m, MDLSTMLayer):
                tmp = m.stateSlice()
                index = 0
                for c in list(self.connections[m]):
                    if isinstance(c.outmod, MDLSTMLayer):
                        self.addConnection(
                            IdentityConnection(
                                tmp,
                                c.outmod.stateSlice(),
                                outSliceFrom=self.hsize * (index),
                                outSliceTo=self.hsize * (index + 1)))
                        index += 1
        # direct connections between input and output
        if self.directlink:
            self._buildDirectLink(inmesh, outmesh)

        # combined inputs
        if self.combinputs > 0:
            cin = LinearLayer(self.combinputs, name='globalin')
            self.addInputModule(cin)
            if 'globalinconn' not in self.predefined:
                self.predefined['globalinconn'] = MotherConnection(
                    cin.componentOutdim * hiddenmesh.componentIndim,
                    'globalinconn')
            self._linkToAll(cin, hiddenmesh, self.predefined['globalinconn'])
Example #5
def buildNestedNetwork():
    """ build a nested network. """
    N = FeedForwardNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.sortModules()
    return N
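A minimal usage sketch: the 2-3-1 network built by buildNetwork is nested as a module named 'inner', and the single input feeds the two-neuron output layer 'b' through a full connection.

n = buildNestedNetwork()
print([m.name for m in n.modules])   # includes 'inner'
print(n.activate([0.3]))             # two output values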
Example #6
def buildMixedNestedNetwork():
    """ build a nested network with the inner one being a ffn and the outer one being recurrent. """
    N = RecurrentNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addRecurrentConnection(FullConnection(c, c))
    N.sortModules()
    return N
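A minimal usage sketch: the outer network is recurrent, so it keeps state across activations (the inner network's previous output is fed back into it), although the visible output layer 'b' here only receives input from 'a'.

n = buildMixedNestedNetwork()
n.reset()
print(n.activate([0.2]))   # two output values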
def buildSimpleLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(1, name='i')
    h = LSTMLayer(1, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))
    N.sortModules()
    return N
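A minimal usage sketch: a recurrent network keeps state between activations, so it should be reset before presenting a new sequence; successive activations can then differ as the LSTM state evolves.

n = buildSimpleLSTMNetwork()
n.reset()
for x in (0.1, 0.5, 0.9):
    print(n.activate([x]))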
Example #8
def buildSimpleMDLSTMNetwork(peepholes = False):
    N = RecurrentNetwork('simpleMDLstmNet')  
    i = LinearLayer(1, name = 'i')
    dim = 1
    h = MDLSTMLayer(dim, peepholes = peepholes, name = 'MDlstm')
    o = LinearLayer(1, name = 'o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, outSliceTo = 4*dim, name = 'f1'))
    N.addConnection(FullConnection(b, h, outSliceTo = 4*dim, name = 'f2'))
    N.addRecurrentConnection(FullConnection(h, h, inSliceTo = dim, outSliceTo = 4*dim, name = 'r1'))
    N.addRecurrentConnection(IdentityConnection(h, h, inSliceFrom = dim, outSliceFrom = 4*dim, name = 'rstate'))
    N.addConnection(FullConnection(h, o, inSliceTo = dim, name = 'f3'))
    N.sortModules()
    return N
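A minimal usage sketch: the sliced connections above route the MDLSTM layer's extra state output back into its state input, so externally the network still maps one input to one output per step.

n = buildSimpleMDLSTMNetwork()
n.reset()
print(n.activate([0.4]))   # a single output value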
Example #9
def buildCyclicNetwork(recurrent):
    """ build a cyclic network with 4 modules
    
    :key recurrent: make one of the connections recurrent """
    Network = RecurrentNetwork if recurrent else FeedForwardNetwork
    N = Network('cyc')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = LinearLayer(3, name='c')
    d = LinearLayer(4, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(d)
    N.addOutputModule(c)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addConnection(FullConnection(c, d))
    if recurrent:
        N.addRecurrentConnection(FullConnection(d, a))
    else:
        N.addConnection(FullConnection(d, a))
    N.sortModules()
    return N
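A minimal usage sketch: closing the cycle a->b->c->d->a is only possible with a recurrent connection; the purely feed-forward variant has no valid topological order, so sortModules() is expected to fail for buildCyclicNetwork(False).

n = buildCyclicNetwork(recurrent=True)
print(n.activate([1.]))   # three output values from layer 'c'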
Example #10
    def __init__(self, dims, **args):
        """ The one required argument specifies the sizes of each dimension (minimum 2) """

        SwipingNetwork.__init__(self, dims=dims, **args)

        pdims = product(dims)
        # the input is a 2D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.insize * pdims, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')

        # the output is a 2D-mesh (as a view on a flat sigmoid output layer)
        outmod = self.outcomponentclass(self.outputs * pdims, name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')

        if self.componentclass is MDLSTMLayer:
            c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
            adims = tuple(list(dims) + [4])
            hiddenmesh = ModuleMesh(c, adims, 'hidden', baserename=True)
        else:
            hiddenmesh = ModuleMesh.constructWithLayers(
                self.componentclass, self.hsize,
                tuple(list(dims) + [self.swipes]), 'hidden')

        self._buildSwipingStructure(inmesh, hiddenmesh, outmesh)

        # add the identity connections for the states
        for m in self.modules:
            if isinstance(m, MDLSTMLayer):
                tmp = m.stateSlice()
                index = 0
                for c in list(self.connections[m]):
                    if isinstance(c.outmod, MDLSTMLayer):
                        self.addConnection(
                            IdentityConnection(
                                tmp,
                                c.outmod.stateSlice(),
                                outSliceFrom=self.hsize * (index),
                                outSliceTo=self.hsize * (index + 1)))
                        index += 1

        self.sortModules()
Example #11
from pybrain3 import FeedForwardNetwork, LinearLayer, SigmoidLayer, BiasUnit, FullConnection, RecurrentNetwork, \
    TanhLayer

# network = RecurrentNetwork()  # alternative: a recurrent network
network = FeedForwardNetwork()

inLayer = LinearLayer(35)  # input layer
# hiddenLayer = SigmoidLayer(5)  # alternative hidden layer
hiddenLayer = TanhLayer(5)
outLayer = LinearLayer(1)  # output layer
bias = BiasUnit()  # bias unit

network.addInputModule(inLayer)
network.addModule(bias)
network.addModule(hiddenLayer)
network.addOutputModule(outLayer)

bias_to_hidden = FullConnection(bias, hiddenLayer)  # connect the layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

network.addConnection(bias_to_hidden)  # add the connections to the network
network.addConnection(in_to_hidden)
network.addConnection(hidden_to_out)

network.sortModules()
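A minimal training sketch (hedged: it assumes pybrain3 mirrors the classic PyBrain module layout, and the single all-zero sample below is only a placeholder).

from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

dataset = SupervisedDataSet(35, 1)        # 35 inputs, 1 target, matching the network above
dataset.addSample([0.0] * 35, [0.0])      # placeholder sample
trainer = BackpropTrainer(network, dataset)
print(trainer.train())                    # one epoch; returns the training error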
Example #12
def buildSomeModules(number = 4):
    res = []
    for i in range(number):
        res.append(LinearLayer(1, 'l'+str(i)))
    return res