Example #1
File: gate.py  Project: davidmiller/pybrain
    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        dim = self.indim // 2  # integer division keeps the slice indices integral
        in0 = inbuf[:dim]
        in1 = inbuf[dim:]
        out0 = outerr[:dim]
        out1 = outerr[dim:]
        inerr[:dim] += sigmoidPrime(in0) * in1 * out0
        inerr[dim:] += sigmoid(in0) * out0

        inerr[:dim] -= sigmoidPrime(in0) * in1 * out1
        inerr[dim:] += (1 - sigmoid(in0)) * out1
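
All of these examples rely on pybrain's elementwise sigmoid helpers. For reference, a minimal NumPy sketch of equivalent sigmoid and sigmoidPrime functions (an illustration, not necessarily pybrain's exact implementation) is:

import numpy

def sigmoid(x):
    # elementwise logistic function 1 / (1 + exp(-x))
    return 1.0 / (1.0 + numpy.exp(-x))

def sigmoidPrime(x):
    # derivative of the logistic function: sigmoid(x) * (1 - sigmoid(x))
    s = sigmoid(x)
    return s * (1.0 - s)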
Example #2
    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        dim = self.indim // 2
        in0 = inbuf[:dim]
        in1 = inbuf[dim:]
        out0 = outerr[:dim]
        out1 = outerr[dim:]
        inerr[:dim] += sigmoidPrime(in0) * in1 * out0
        inerr[dim:] += sigmoid(in0) * out0

        inerr[:dim] -= sigmoidPrime(in0) * in1 * out1
        inerr[dim:] += (1 - sigmoid(in0)) * out1
Example #3
 def getDistribution(self, input):
     input = self._appendBiasTerm(input)
     mul = self.thetas * input
     posProb = sigmoid(mul.sum())
     negProb = 1 - posProb
     
     return array([negProb, posProb])
Example #4
File: plot.py  Project: crcollins/ml-class
def plot_neural_net(X, y, clf, segment=False):
    values = X
    # plot the raw inputs first, then re-plot after each layer transforms them
    pca_plot(X, y, save="00_conn.png", segment=segment)
    counter = 1
    for i, layer in enumerate(clf.nn.modulesSorted):
        name = layer.__class__.__name__
        if name == "BiasUnit":
            continue

        try:
            conn = clf.nn.connections[layer][0]
        except IndexError:
            continue

        if "Linear" not in name:
            if "Sigmoid" in name:
                add = "sigmoid"
                values = sigmoid(values)
            elif "Tanh" in name:
                add = "tanh"
                values = tanh(values)
            pca_plot(values, y, save="%02d_conn_%s.png" % (counter, add), segment=segment)
            counter += 1
        shape = (conn.outdim, conn.indim)
        temp = numpy.dot(numpy.reshape(conn.params, shape), values.T)
        pca_plot(temp.T, y, save="%02d_conn.png" % counter, segment=segment)
        counter += 1
        values = temp.T
Example #5
    def testDistribution(self):
        input = [1, 2]
        thetas = [0.5, 0.4, 0.3]
        classifier = _LogisticRegression(thetas)

        distribution = classifier.getDistribution(input)
        posProbability = sigmoid(0.5 * 1 + 0.4 * 1 + 0.3 * 2)
        expectedDistribution = [1 - posProbability, posProbability]

        assertListAlmostEqual(self, distribution, expectedDistribution, 0.0001)
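
The test above relies on an assertListAlmostEqual helper that is not shown in this example. A hypothetical minimal version (assuming it compares the two sequences elementwise within an absolute tolerance) might look like:

def assertListAlmostEqual(testCase, actual, expected, tolerance):
    # hypothetical helper: check equal length, then compare elementwise within tolerance
    testCase.assertEqual(len(actual), len(expected))
    for a, e in zip(actual, expected):
        testCase.assertTrue(abs(a - e) <= tolerance)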
Example #6
 def calculateDerivatives(thetas):
     # gradient of the logistic-regression cost: average of (posProb - y) * x_j
     # over the training examples in the surrounding X and Y arrays
     derivatives = []
     for j in range(len(thetas)):
         sum = 0
         
         for i in range(Y.shape[0]):
             y = Y[i][0]                    
             x = X[i]
             
             mul = thetas * x
             posProb = sigmoid(mul.sum())
             
             sum += (posProb - y) * X[i][j]
             
         jDerivative = sum / Y.shape[0]
         derivatives.append(jDerivative)
         
     return derivatives
Example #7
 def costFunction(thetas):
     # average cross-entropy over the surrounding X and Y arrays, plus an L2 penalty
     totalCost = 0
     for i in range(Y.shape[0]):
         y = Y[i][0]                    
         x = X[i]
         
         mul = thetas * x
         posProb = sigmoid(mul.sum())
         
         assert y == 1 or y == 0
         
         if y == 1:
             cost = -log(posProb)
         else:
             cost = -log(1 - posProb)
             
         totalCost += cost
         
     regLambda = 0.0001
     regularizationSum = regLambda * (thetas ** 2).sum() / (2 * Y.shape[0])
     
     return totalCost / Y.shape[0] + regularizationSum
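
Examples #6 and #7 compute the gradient and the regularized cross-entropy cost of a logistic regression over the surrounding X and Y arrays. Purely as an illustration of how such helpers are typically driven (this loop, its learning rate, and its iteration count are hypothetical and not part of the original project; numpy and the two functions above are assumed to be in scope), a plain batch gradient-descent driver could look like:

def gradientDescent(thetas, learningRate=0.1, iterations=1000):
    # repeatedly step opposite the gradient returned by calculateDerivatives
    thetas = numpy.array(thetas, dtype=float)
    for _ in range(iterations):
        derivatives = numpy.array(calculateDerivatives(thetas))
        thetas -= learningRate * derivatives
    print("final cost:", costFunction(thetas))
    return thetas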
Example #8
File: gate.py  Project: davidmiller/pybrain
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf += sigmoid(inbuf[:self.outdim]) * inbuf[self.outdim:]
Example #9
class LSTMLayer(NeuronLayer, ParameterContainer):
    """Long short-term memory cell layer.
    
    The input consists of 4 parts, in the following order:
    - input gate
    - forget gate
    - cell input
    - output gate
    
    """

    sequential = True
    peepholes = False
    maxoffset = 0

    # Transfer functions and their derivatives
    f = lambda _, x: sigmoid(x)
    fprime = lambda _, x: sigmoidPrime(x)
    g = lambda _, x: tanh(x)
    gprime = lambda _, x: tanhPrime(x)
    h = lambda _, x: tanh(x)
    hprime = lambda _, x: tanhPrime(x)

    def __init__(self, dim, peepholes=False, name=None):
        """ 
        :arg dim: number of cells
        :key peepholes: enable peephole connections (from state to gates)? """
        self.setArgs(dim=dim, peepholes=peepholes)

        # Internal buffers, created dynamically:
        self.bufferlist = [
            ('ingate', dim),
            ('outgate', dim),
            ('forgetgate', dim),
            ('ingatex', dim),
            ('outgatex', dim),
            ('forgetgatex', dim),
            ('state', dim),
            ('ingateError', dim),
            ('outgateError', dim),
            ('forgetgateError', dim),
            ('stateError', dim),
        ]

        Module.__init__(self, 4 * dim, dim, name)
        if self.peepholes:
            ParameterContainer.__init__(self, dim * 3)
            self._setParameters(self.params)
            self._setDerivatives(self.derivs)

    def _setParameters(self, p, owner=None):
        ParameterContainer._setParameters(self, p, owner)
        dim = self.outdim
        self.ingatePeepWeights = self.params[:dim]
        self.forgetgatePeepWeights = self.params[dim:dim * 2]
        self.outgatePeepWeights = self.params[dim * 2:]

    def _setDerivatives(self, d, owner=None):
        ParameterContainer._setDerivatives(self, d, owner)
        dim = self.outdim
        self.ingatePeepDerivs = self.derivs[:dim]
        self.forgetgatePeepDerivs = self.derivs[dim:dim * 2]
        self.outgatePeepDerivs = self.derivs[dim * 2:]

    def _isLastTimestep(self):
        """Tell wether the current offset is the maximum offset."""
        return self.maxoffset == self.offset

    def _forwardImplementation(self, inbuf, outbuf):
        self.maxoffset = max(self.offset + 1, self.maxoffset)

        dim = self.outdim
        # slicing the input buffer into the 4 parts
        try:
            self.ingatex[self.offset] = inbuf[:dim]
        except IndexError:
            raise IndexError("offset %s exceeds buffer shape %s"
                             % (self.offset, self.ingatex.shape))

        self.forgetgatex[self.offset] = inbuf[dim:dim * 2]
        cellx = inbuf[dim * 2:dim * 3]
        self.outgatex[self.offset] = inbuf[dim * 3:]

        # peephole treatment
        if self.peepholes and self.offset > 0:
            self.ingatex[self.offset] += self.ingatePeepWeights * self.state[
                self.offset - 1]
            self.forgetgatex[
                self.offset] += self.forgetgatePeepWeights * self.state[
                    self.offset - 1]

        self.ingate[self.offset] = self.f(self.ingatex[self.offset])
        self.forgetgate[self.offset] = self.f(self.forgetgatex[self.offset])

        self.state[self.offset] = self.ingate[self.offset] * self.g(cellx)
        if self.offset > 0:
            self.state[self.offset] += self.forgetgate[
                self.offset] * self.state[self.offset - 1]

        if self.peepholes:
            self.outgatex[self.offset] += self.outgatePeepWeights * self.state[
                self.offset]
        self.outgate[self.offset] = self.f(self.outgatex[self.offset])

        outbuf[:] = self.outgate[self.offset] * self.h(self.state[self.offset])

    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        dim = self.outdim
        cellx = inbuf[dim * 2:dim * 3]

        self.outgateError[self.offset] = self.fprime(
            self.outgatex[self.offset]) * outerr * self.h(
                self.state[self.offset])
        self.stateError[self.offset] = outerr * self.outgate[
            self.offset] * self.hprime(self.state[self.offset])
        if not self._isLastTimestep():
            self.stateError[self.offset] += self.stateError[
                self.offset + 1] * self.forgetgate[self.offset + 1]
            if self.peepholes:
                self.stateError[self.offset] += self.ingateError[
                    self.offset + 1] * self.ingatePeepWeights
                self.stateError[self.offset] += self.forgetgateError[
                    self.offset + 1] * self.forgetgatePeepWeights
        if self.peepholes:
            self.stateError[self.offset] += self.outgateError[
                self.offset] * self.outgatePeepWeights
        cellError = self.ingate[self.offset] * self.gprime(
            cellx) * self.stateError[self.offset]
        if self.offset > 0:
            self.forgetgateError[self.offset] = self.fprime(
                self.forgetgatex[self.offset]) * self.stateError[
                    self.offset] * self.state[self.offset - 1]

        self.ingateError[self.offset] = self.fprime(self.ingatex[
            self.offset]) * self.stateError[self.offset] * self.g(cellx)

        # compute derivatives
        if self.peepholes:
            self.outgatePeepDerivs += self.outgateError[
                self.offset] * self.state[self.offset]
            if self.offset > 0:
                self.ingatePeepDerivs += self.ingateError[
                    self.offset] * self.state[self.offset - 1]
                self.forgetgatePeepDerivs += self.forgetgateError[
                    self.offset] * self.state[self.offset - 1]

        inerr[:dim] = self.ingateError[self.offset]
        inerr[dim:dim * 2] = self.forgetgateError[self.offset]
        inerr[dim * 2:dim * 3] = cellError
        inerr[dim * 3:] = self.outgateError[self.offset]

    def whichNeuron(self, inputIndex=None, outputIndex=None):
        if inputIndex is not None:
            return inputIndex % self.dim
        if outputIndex is not None:
            return outputIndex
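
To make the forward pass above easier to follow, here is a self-contained NumPy sketch of the single-timestep update that _forwardImplementation performs, with peepholes omitted and illustrative names (a paraphrase for clarity, not pybrain's code):

import numpy

def lstm_step(inbuf, prev_state, dim):
    # the 4*dim input buffer holds: input gate, forget gate, cell input, output gate
    sigmoid = lambda x: 1.0 / (1.0 + numpy.exp(-x))
    ingate = sigmoid(inbuf[:dim])
    forgetgate = sigmoid(inbuf[dim:dim * 2])
    cellx = inbuf[dim * 2:dim * 3]
    outgate = sigmoid(inbuf[dim * 3:])
    # new cell state: gated cell input plus gated carry-over of the previous state
    state = ingate * numpy.tanh(cellx) + forgetgate * prev_state
    # the layer output is the gated, squashed state
    return outgate * numpy.tanh(state), state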
Example #10
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf[:] = sigmoid(inbuf)
Example #11
File: mdlstm.py  Project: HKou/pybrain
 def f(self, x): return sigmoid(x)
 def fprime(self, x): return sigmoidPrime(x)
Example #12
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf[:] = sigmoid(inbuf)
Example #13
 def _forwardImplementation(self, inbuf, outbuf):
     dim = self.indim // 2
     outbuf[:dim] += sigmoid(inbuf[:dim]) * inbuf[dim:]
     outbuf[dim:] += (1 - sigmoid(inbuf[:dim])) * inbuf[dim:]
Example #14
 def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
     inerr[:self.outdim] += (sigmoidPrime(inbuf[:self.outdim]) *
                             inbuf[self.outdim:] * outerr)
     inerr[self.outdim:] += (sigmoid(inbuf[:self.outdim]) * outerr)
Example #15
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf += sigmoid(inbuf[:self.outdim]) * inbuf[self.outdim:]
Example #16
 def f(self, x): return sigmoid(x)
 def fprime(self, x): return sigmoidPrime(x)
Example #17
File: limag.py  Project: HKou/pybrain
 def _transformLinkages(self):
     # print "before", self.lm
     self.lm = sigmoid(self.rawlm)
Example #18
 def _transformLinkages(self):
     #print "before", self.lm
     self.lm = sigmoid(self.rawlm)                        
Example #19
File: gate.py  Project: davidmiller/pybrain
 def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
     inerr[:self.outdim] += (sigmoidPrime(inbuf[:self.outdim])  
                             * inbuf[self.outdim:]
                             * outerr)
     inerr[self.outdim:] += (sigmoid(inbuf[:self.outdim]) 
                             * outerr)
Example #20
File: gate.py  Project: davidmiller/pybrain
 def _forwardImplementation(self, inbuf, outbuf):
     dim = self.indim // 2  # integer division keeps the slice indices integral
     outbuf[:dim] += sigmoid(inbuf[:dim]) * inbuf[dim:]
     outbuf[dim:] += (1 - sigmoid(inbuf[:dim])) * inbuf[dim:]
Example #21
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf[:self.indim] += sigmoid(inbuf)
     outbuf[self.indim:] += 1 - sigmoid(inbuf)
Example #22
File: gate.py  Project: davidmiller/pybrain
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf[:self.indim] += sigmoid(inbuf)
     outbuf[self.indim:] += 1 - sigmoid(inbuf)
Example #23
 def f(self, x):
     return sigmoid(x)
Example #24
 def _forwardImplementation(self, inbuf, outbuf):
     outbuf[:] = sigmoid(inbuf)
     self.saved += outbuf
     self.numsaved += 1
Example #25
File: mdlstm.py  Project: anurive/pybrain
 def f(self, x):
     return sigmoid(x)