def buildNonGravityNet(recurrent = False):
    """Build a small test network wired with partially-sliced identity
    connections.

    Topology: 2-unit linear input -> 3-unit linear hidden layer and a
    2-unit sigmoid layer -> 1-unit linear output, joined by
    ``IdentityConnection`` objects that use ``inSlice*``/``outSlice*``
    to connect only parts of the layers.

    :param recurrent: if True, build a ``RecurrentNetwork`` and add two
        extra recurrent identity connections; otherwise build a plain
        ``FeedForwardNetwork``.
    :return: the network, already sorted via ``sortModules()``.
    """
    network = RecurrentNetwork() if recurrent else FeedForwardNetwork()
    inputLayer = LinearLayer(2)
    hiddenLinear = LinearLayer(3)
    hiddenSigmoid = SigmoidLayer(2)
    outputLayer = LinearLayer(1)
    network.addInputModule(inputLayer)
    network.addModule(hiddenLinear)
    network.addModule(hiddenSigmoid)
    network.addOutputModule(outputLayer)
    # Sliced connections: each one maps only a sub-range of the source
    # or destination layer's units.
    network.addConnection(IdentityConnection(inputLayer, hiddenLinear, outSliceFrom = 1))
    network.addConnection(IdentityConnection(inputLayer, hiddenLinear, outSliceTo = 2))
    network.addConnection(IdentityConnection(hiddenLinear, outputLayer, inSliceFrom = 2))
    network.addConnection(IdentityConnection(hiddenLinear, outputLayer, inSliceTo = 1))
    network.addConnection(IdentityConnection(inputLayer, hiddenSigmoid))
    network.addConnection(IdentityConnection(hiddenLinear, hiddenSigmoid, inSliceFrom = 1))
    network.addConnection(IdentityConnection(hiddenSigmoid, outputLayer, inSliceFrom = 1))
    if recurrent:
        # Feedback loops: sigmoid layer back to the input, and the linear
        # hidden layer (partially) back onto itself.
        network.addRecurrentConnection(IdentityConnection(hiddenSigmoid, inputLayer))
        network.addRecurrentConnection(
            IdentityConnection(hiddenLinear, hiddenLinear, inSliceFrom = 1, outSliceTo = 2))
    network.sortModules()
    return network
def fromModules(cls, visible, hidden, bias, con, biascon):
    """Alternate constructor: assemble an instance from pre-built modules.

    :param visible: module used as the visible (input) layer.
    :param hidden: module used as the hidden (output) layer.
    :param bias: bias unit feeding the hidden layer.
    :param con: connection from ``visible`` to ``hidden``.
    :param biascon: connection from ``bias`` to ``hidden``.
    :return: a new ``cls`` instance wrapping the assembled network.
    """
    network = FeedForwardNetwork()
    network.addInputModule(visible)
    network.addModule(bias)
    network.addOutputModule(hidden)
    for connection in (con, biascon):
        network.addConnection(connection)
    network.sortModules()
    return cls(network)
def fromDims(cls, visibledim, hiddendim, params=None, biasParams=None):
    """Return a restricted Boltzmann machine of the given dimensions
    with the given distributions.

    :param visibledim: number of units in the (linear) visible layer.
    :param hiddendim: number of units in the (sigmoid) hidden layer.
    :param params: optional initial weights for the visible->hidden
        connection; copied in place if given.
    :param biasParams: optional initial weights for the bias->hidden
        connection; copied in place if given.
    """
    network = FeedForwardNetwork()
    bias = BiasUnit('bias')
    visible = LinearLayer(visibledim, 'visible')
    hidden = SigmoidLayer(hiddendim, 'hidden')
    weightCon = FullConnection(visible, hidden)
    biasCon = FullConnection(bias, hidden)
    # Seed the connections with caller-supplied parameters, if any.
    if params is not None:
        weightCon.params[:] = params
    if biasParams is not None:
        biasCon.params[:] = biasParams
    network.addInputModule(visible)
    network.addModule(bias)
    network.addOutputModule(hidden)
    network.addConnection(weightCon)
    network.addConnection(biasCon)
    network.sortModules()
    return cls(network)
def buildNestedNetwork():
    """ build a nested network. """
    outer = FeedForwardNetwork('outer')
    inLayer = LinearLayer(1, name='a')
    outLayer = LinearLayer(2, name='b')
    inner = buildNetwork(2, 3, 1)
    inner.name = 'inner'
    outer.addInputModule(inLayer)
    outer.addModule(inner)
    outer.addOutputModule(outLayer)
    # NOTE(review): the inner net is connected *after* the output layer
    # (a -> b, then b -> inner), leaving the inner net with no outgoing
    # connection — looks intentional for the nesting test, but verify.
    outer.addConnection(FullConnection(inLayer, outLayer))
    outer.addConnection(FullConnection(outLayer, inner))
    outer.sortModules()
    return outer
def train(self):
    """Greedily pre-train the network one RBM layer at a time.

    For each RBM yielded by ``self.iterRbms()``: train it for
    ``self.epochs`` epochs on the current dataset, record the trained
    RBM and its inverse in ``self.rbms`` / ``self.invRbms``, copy its
    weights into a growing feed-forward "piece" network, and use that
    network to transform the original dataset into the training data
    for the next layer.

    Side effects: resets and fills ``self.invRbms`` and ``self.rbms``;
    repeatedly calls ``self.net.sortModules()``.
    """
    # We will build up a network piecewise in order to create a new dataset
    # for each layer.
    dataset = self.dataset
    piecenet = FeedForwardNetwork()
    piecenet.addInputModule(copy.deepcopy(self.net.inmodules[0]))
    # Add a bias
    bias = BiasUnit()
    piecenet.addModule(bias)
    # Add the first visible layer
    firstRbm = next(self.iterRbms())
    visible = copy.deepcopy(firstRbm.visible)
    piecenet.addModule(visible)
    # For saving the rbms and their inverses
    self.invRbms = []
    self.rbms = []
    for rbm in self.iterRbms():
        # NOTE(review): re-sorts the *full* target net each iteration,
        # even though only `piecenet` is modified below — confirm needed.
        self.net.sortModules()
        # Train the first layer with an rbm trainer for `epoch` epochs.
        trainer = self.trainerKlass(rbm, dataset, self.cfg)
        for _ in range(self.epochs):
            trainer.train()
        self.invRbms.append(trainer.invRbm)
        self.rbms.append(rbm)
        # Add the connections and the hidden layer of the rbm to the net.
        hidden = copy.deepcopy(rbm.hidden)
        biascon = FullConnection(bias, hidden)
        biascon.params[:] = rbm.biasWeights
        con = FullConnection(visible, hidden)
        con.params[:] = rbm.weights
        piecenet.addConnection(biascon)
        piecenet.addConnection(con)
        piecenet.addModule(hidden)
        # Overwrite old outputs
        piecenet.outmodules = [hidden]
        piecenet.outdim = rbm.hiddenDim
        piecenet.sortModules()
        # Build the next layer's dataset by pushing the *original*
        # samples through every layer trained so far.
        dataset = UnsupervisedDataSet(rbm.hiddenDim)
        # Each sample is a 1-tuple; the trailing comma unpacks it.
        for sample, in self.dataset:
            new_sample = piecenet.activate(sample)
            dataset.addSample(new_sample)
        # The freshly trained hidden layer becomes the next visible layer.
        visible = hidden