def buildSharedCrossedNetwork():
    """Build a feed-forward network with shared (mother) connections.

    Two single-neuron hidden modules are linked symmetrically: each one
    is connected to a different input neuron than the output neuron it
    feeds. The shared connection weights are fixed at 1 (input side)
    and 2 (output side).
    """
    net = FeedForwardNetwork('shared-crossed')
    hidden_size = 1

    inp = LinearLayer(2, name='a')
    hid_left = LinearLayer(hidden_size, name='b')
    hid_right = LinearLayer(hidden_size, name='c')
    out = LinearLayer(2, name='d')

    net.addInputModule(inp)
    net.addModule(hid_left)
    net.addModule(hid_right)
    net.addOutputModule(out)

    # One shared weight container per side; all connections using the
    # same MotherConnection share its parameters.
    mother_in = MotherConnection(hidden_size)
    mother_in.params[:] = scipy.array((1,))
    mother_out = MotherConnection(hidden_size)
    mother_out.params[:] = scipy.array((2,))

    # Crossed wiring: hidden 'b' reads input 0 but writes output 1;
    # hidden 'c' reads input 1 but writes output 0.
    net.addConnection(SharedFullConnection(mother_in, inp, hid_left, inSliceTo=1))
    net.addConnection(SharedFullConnection(mother_in, inp, hid_right, inSliceFrom=1))
    net.addConnection(SharedFullConnection(mother_out, hid_left, out, outSliceFrom=1))
    net.addConnection(SharedFullConnection(mother_out, hid_right, out, outSliceTo=1))

    net.sortModules()
    return net
def _buildDirectLink(self, inmesh, outmesh):
    """Directly connect each input-mesh unit to its corresponding
    output-mesh unit, with all links sharing one MotherConnection
    (cached in self.predefined under 'directconn')."""
    if 'directconn' not in self.predefined:
        conn_dim = inmesh.componentOutdim * outmesh.componentIndim
        self.predefined['directconn'] = MotherConnection(conn_dim, 'inconn')
    mother = self.predefined['directconn']
    for unit in self._iterateOverUnits():
        self.addConnection(
            SharedFullConnection(mother, inmesh[unit], outmesh[unit]))
def buildSharedCrossedNetwork():
    """ build a network with shared connections. Two hidden modules are
    symmetrically linked, but to a different input neuron than the output
    neuron. The shared weights are fixed (1 on the input side, 2 on the
    output side). """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name='a')
    b = LinearLayer(h, name='b')
    c = LinearLayer(h, name='c')
    d = LinearLayer(2, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)
    # One shared parameter container per side: connections built on the
    # same MotherConnection share its weights.
    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1, ))
    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2, ))
    # Crossed wiring: 'b' reads input 0 but writes output 1; 'c' reads
    # input 1 but writes output 0.
    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo=1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo=1))
    N.sortModules()
    return N
def _buildCaptureNetwork(self):
    """Assemble the full mesh network: a 2D input mesh, a 2D output mesh,
    a hidden mesh with 4 layers per grid cell (presumably one per swipe
    direction -- TODO confirm), plus optional direct input->output links
    and an optional global input module."""
    # the input is a 2D-mesh (as a view on a flat input layer)
    inmod = LinearLayer(self.insize * self.size * self.size, name='input')
    inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.size, self.size), 'inmesh')
    # the output is a 2D-mesh (as a view on a flat sigmoid output layer)
    outmod = self.outcomponentclass(self.outputs * self.size * self.size, name='output')
    outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.size, self.size), 'outmesh')
    if self.componentclass is MDLSTMLayer:
        # For MDLSTM cells, expose only the "meat" slice (non-state part)
        # of each freshly built layer as the mesh component.
        c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
        hiddenmesh = ModuleMesh(c, (self.size, self.size, 4), 'hidden', baserename=True)
    else:
        hiddenmesh = ModuleMesh.constructWithLayers(
            self.componentclass, self.hsize, (self.size, self.size, 4), 'hidden')
    self._buildBorderStructure(inmesh, hiddenmesh, outmesh)
    # add the identity connections for the states
    for m in self.modules:
        if isinstance(m, MDLSTMLayer):
            tmp = m.stateSlice()
            index = 0
            # NOTE(review): connections are enumerated in self.connections[m]
            # order, and `index` selects the destination state sub-slice per
            # MDLSTM successor -- relies on that iteration order being stable.
            for c in list(self.connections[m]):
                if isinstance(c.outmod, MDLSTMLayer):
                    self.addConnection(
                        IdentityConnection(
                            tmp, c.outmod.stateSlice(),
                            outSliceFrom=self.hsize * (index),
                            outSliceTo=self.hsize * (index + 1)))
                    index += 1
    # direct connections between input and output
    if self.directlink:
        self._buildDirectLink(inmesh, outmesh)
    # combined inputs
    if self.combinputs > 0:
        cin = LinearLayer(self.combinputs, name='globalin')
        self.addInputModule(cin)
        # Cache the shared global-input connection so repeated builds
        # (e.g. network copies) reuse the same parameters.
        if 'globalinconn' not in self.predefined:
            self.predefined['globalinconn'] = MotherConnection(
                cin.componentOutdim * hiddenmesh.componentIndim, 'globalinconn')
        self._linkToAll(cin, hiddenmesh, self.predefined['globalinconn'])