Example #1
    def __init__(self, inputdim, insize, convSize, numFeatureMaps, **args):
        FeedForwardNetwork.__init__(self, **args)
        # flat input layer: an insize x insize grid with inputdim values per cell
        inlayer = LinearLayer(inputdim * insize * insize)
        self.addInputModule(inlayer)
        # build the convolutional part of the network on top of the input layer
        self._buildStructure(inputdim, insize, inlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
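
These constructor snippets assume PyBrain's building blocks are already in scope. A sketch of the imports they appear to rely on (module paths follow the PyBrain source tree and are an assumption here; verify against your installed version):

    # Imports assumed by the snippets on this page (PyBrain; paths unverified).
    from pybrain.structure.networks import FeedForwardNetwork
    from pybrain.structure import LinearLayer, BiasUnit, IdentityConnection
    from pybrain.structure.connections.shared import MotherConnection, SharedFullConnection
    from pybrain.structure.modulemesh import ModuleMesh
    from pybrain.structure.moduleslice import ModuleSlice
    from pybrain.utilities import crossproduct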
Example #2
    def __init__(self, predefined = None, **kwargs):
        """ For the current implementation, the sequence length
        needs to be fixed, and given at construction time. """
        if predefined is not None:
            self.predefined = predefined
        else:
            self.predefined = {}
        FeedForwardNetwork.__init__(self, **kwargs)
        assert self.seqlen is not None

        # the input is a 1D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen,), 'inmesh')

        # the output is also a 1D-mesh
        outmod = self.outcomponentclass(self.outputsize * self.seqlen, name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen,), 'outmesh')

        # the hidden layers are placed in a 2 x seqlen mesh
        hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hiddensize,
                                                    (2, self.seqlen), 'hidden')

        # add the modules
        for c in inmesh:
            self.addInputModule(c)
        for c in outmesh:
            self.addOutputModule(c)
        for c in hiddenmesh:
            self.addModule(c)

        # set the connection weights to be shared (via MotherConnections)
        inconnf = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
        outconnf = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
        forwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='fconn')
        if self.symmetric:
            backwardconn = forwardconn
            inconnb = inconnf
            outconnb = outconnf
        else:
            backwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='bconn')
            inconnb = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconnb')
            outconnb = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconnb')

        # build the connections
        for i in range(self.seqlen):
            # input to hidden
            self.addConnection(SharedFullConnection(inconnf, inmesh[(i,)], hiddenmesh[(0, i)]))
            self.addConnection(SharedFullConnection(inconnb, inmesh[(i,)], hiddenmesh[(1, i)]))
            # hidden to output
            self.addConnection(SharedFullConnection(outconnf, hiddenmesh[(0, i)], outmesh[(i,)]))
            self.addConnection(SharedFullConnection(outconnb, hiddenmesh[(1, i)], outmesh[(i,)]))
            if i > 0:
                # forward in time
                self.addConnection(SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)], hiddenmesh[(0, i)]))
            if i < self.seqlen - 1:
                # backward in time
                self.addConnection(SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)], hiddenmesh[(1, i)]))

        self.sortModules()
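
The attributes read here (seqlen, inputsize, hiddensize, outputsize, componentclass, outcomponentclass, symmetric) are filled in from keyword arguments by PyBrain's setArgs mechanism in the base Network constructor. A hypothetical instantiation sketch, assuming the snippet belongs to PyBrain's BidirectionalNetwork (class name and argument values are assumptions):

    # Hypothetical usage sketch; verify the class name against your PyBrain version.
    from pybrain.structure import TanhLayer, SoftmaxLayer
    from pybrain.structure.networks import BidirectionalNetwork

    net = BidirectionalNetwork(seqlen=10,                       # fixed sequence length
                               inputsize=3,                     # inputs per time step
                               hiddensize=5,                    # hidden units per direction
                               outputsize=2,                    # outputs per time step
                               componentclass=TanhLayer,        # hidden layer type
                               outcomponentclass=SoftmaxLayer,  # output layer type
                               symmetric=False)                 # separate fwd/bwd weights
    out = net.activate([0.0] * (3 * 10))  # flat input of inputsize * seqlen values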
Example #3
    def __init__(self, boardSize, convSize, numFeatureMaps, **args):
        inputdim = 2
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * boardSize * boardSize, name='in')
        self.addInputModule(inlayer)

        # we need some treatment of the border too - thus we pad the direct board input.
        x = convSize // 2  # integer division: border width in cells
        insize = boardSize + 2 * x
        if convSize % 2 == 0:
            insize -= 1
        paddedlayer = LinearLayer(inputdim * insize * insize, name='pad')
        self.addModule(paddedlayer)

        # we connect a bias to the padded parts (with shared but trainable weights).
        bias = BiasUnit()
        self.addModule(bias)
        biasConn = MotherConnection(inputdim)

        paddable = []
        if convSize % 2 == 0:
            xs = list(range(x)) + list(range(insize - x + 1, insize))
        else:
            xs = list(range(x)) + list(range(insize - x, insize))
        paddable.extend(crossproduct([range(insize), xs]))
        paddable.extend(crossproduct([xs, range(x, boardSize + x)]))

        for (i, j) in paddable:
            self.addConnection(
                SharedFullConnection(biasConn,
                                     bias,
                                     paddedlayer,
                                     outSliceFrom=(i * insize + j) * inputdim,
                                     outSliceTo=(i * insize + j + 1) *
                                     inputdim))

        for i in range(boardSize):
            inmod = ModuleSlice(inlayer,
                                outSliceFrom=i * boardSize * inputdim,
                                outSliceTo=(i + 1) * boardSize * inputdim)
            outmod = ModuleSlice(paddedlayer,
                                 inSliceFrom=((i + x) * insize + x) * inputdim,
                                 inSliceTo=((i + x) * insize + x + boardSize) *
                                 inputdim)
            self.addConnection(IdentityConnection(inmod, outmod))

        self._buildStructure(inputdim, insize, paddedlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
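
The padding arithmetic can be checked in isolation: an odd convSize pads the board by x = convSize // 2 cells on every side, while an even convSize drops one padded row and column. A standalone check (plain Python; the helper name is hypothetical):

    # Standalone check of the padding arithmetic above (no PyBrain required).
    def padded_size(boardSize, convSize):
        x = convSize // 2
        insize = boardSize + 2 * x
        if convSize % 2 == 0:
            insize -= 1  # even kernels pad asymmetrically: one less row/column
        return x, insize

    print(padded_size(9, 5))  # (2, 13): 9x9 board with a 5x5 kernel
    print(padded_size(9, 4))  # (2, 12): even 4x4 kernel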
Example #4
    def __init__(self, **args):
        FeedForwardNetwork.__init__(self, **args)
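
This fragment only forwards to the base constructor. For contrast, a minimal complete FeedForwardNetwork subclass would also add modules and connections before calling sortModules(); a sketch, with the class name and layer sizes invented for illustration:

    # Minimal sketch of a complete FeedForwardNetwork subclass (hypothetical).
    from pybrain.structure import LinearLayer, SigmoidLayer, FullConnection
    from pybrain.structure.networks import FeedForwardNetwork

    class TinyNet(FeedForwardNetwork):
        def __init__(self, **args):
            FeedForwardNetwork.__init__(self, **args)
            inl, hid, out = LinearLayer(2), SigmoidLayer(3), LinearLayer(1)
            self.addInputModule(inl)
            self.addModule(hid)
            self.addOutputModule(out)
            self.addConnection(FullConnection(inl, hid))
            self.addConnection(FullConnection(hid, out))
            self.sortModules()  # finalize the topology before first use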