Example #1
File: ANN.py Project: mk2908/WSD
    def _backpropagate(self, expected, learningrate, momentum):
        """Propagate the error of the current output compared with the expected
      output through the network. 
      
      expected -- expected output vector
      learningrate -- learning rate to be applied to the weight changes
      momentum -- momentum with which the weights change"""

        if (types.ListType is not type(expected)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        if (len(expected) != len(self.outputlayer.neurons)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        if (0 == len(self.hiddenlayers)):
            # NOTE : In the current implementation this is actually an error
            # TODO : It should be fine to allow someone to create a simple perceptron
            #        How to deal with this?
            raise exceptions.NeuralWarning('Network has no hidden layers.')

        self.outputlayer.backpropagate(learningrate, momentum, expected)

        self.hiddenlayers.reverse()  # from last to first

        for i in xrange(len(self.hiddenlayers)):
            if (i == 0):
                self.hiddenlayers[i].backpropagate(learningrate, momentum,
                                                   self.outputlayer)
            else:
                self.hiddenlayers[i].backpropagate(learningrate, momentum,
                                                   self.hiddenlayers[i - 1])

        self.hiddenlayers.reverse()  # reverse back to the original order
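
For illustration, a minimal standalone sketch of the same last-to-first traversal without the in-place reverse()/reverse() round trip. The helper name is hypothetical; each layer is assumed to expose the backpropagate(learningrate, momentum, nextlayer) signature used above.

# Hypothetical helper illustrating the traversal order used by _backpropagate:
# the output layer is handled first, then the hidden layers from last to first,
# each receiving the layer that sits below it in the network.
def backpropagate_layers(hiddenlayers, outputlayer, learningrate, momentum, expected):
    outputlayer.backpropagate(learningrate, momentum, expected)
    below = outputlayer
    for layer in reversed(hiddenlayers):  # last hidden layer first
        layer.backpropagate(learningrate, momentum, below)
        below = layer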
Example #2
File: ANN.py Project: mk2908/WSD
    def train(self,
              exemplar,
              learningrate=0.5,
              momentum=0.9):  # exemplar: ([input list], [output list])
        """Train the network on one exemplar. An exemplar is a training
      set pair which consists of ([input vector], [expected output vector])
      
      exemplar -- exemplar to train
      learningrate -- learning rate to be applied to the weight changes
      momentum -- momentum with which the weights change"""

        if (types.TupleType is not type(exemplar)):
            raise exceptions.NeuralException(exceptions.ERR_PARAMTYPE)

        if (0 == len(exemplar)):
            raise exceptions.NeuralException(exceptions.ERR_PARAMVAL)

        if (0 == len(self.hiddenlayers)):
            # NOTE : In the current implementation this is actually an error
            # TODO : It should be fine to allow someone to create a simple perceptron
            #        How to deal with this?
            raise exceptions.NeuralWarning(exceptions.ERR_LAYERNOHID)

        self._feedforward(exemplar[0])  # set input for each layer
        self._backpropagate(exemplar[1], learningrate, momentum)  # update the weights in each layer
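
A hedged usage sketch for train(): the exemplar is the ([input list], [output list]) tuple described in the docstring. The no-argument BackPropNet constructor shown here is an assumption for illustration; its actual signature is not part of this listing.

# Hypothetical usage sketch; the constructor call is assumed, not taken from the listing.
net = BackPropNet()                     # assumed no-argument constructor
exemplar = ([0.0, 1.0], [1.0])          # ([input vector], [expected output vector])
net.train(exemplar, learningrate=0.5, momentum=0.9)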
Example #3
File: ANN.py Project: mk2908/WSD
    def activate(self):
        """Run the neuron's activation function. The function may be
      set to any of the classes provided in neural.activation or
      by a user supplied class."""

        if (self.inputsum is None):
            raise exceptions.NeuralException(exceptions.ERR_INPUTNONE)

        if (type(self.inputsum) is not types.FloatType):
            raise exceptions.NeuralException(exceptions.ERR_INPUTTYPE)

        self.output = self.activation.activate(self.inputsum)  # Compute output
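
Because the activation object only needs an activate(inputsum) method, a user-supplied class can be as small as the sketch below. The class name is hypothetical; the project's own classes live in neural.activation.

import math

# Hypothetical user-supplied activation class; only activate(inputsum) is required.
class SigmoidActivation(object):
    def activate(self, inputsum):
        # Standard logistic sigmoid: 1 / (1 + e^(-x))
        return 1.0 / (1.0 + math.exp(-inputsum))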
Example #4
File: ANN.py Project: mk2908/WSD
    def learn(self, exemplars, epochs=1, learningrate=0.5, momentum=0.9):
        """Teach the network to recognize a set of exemplars by training the
      entire set a number of times.
      
      exemplars -- a list containing exemplar training pairs
      epochs -- the number of epochs (complete passes over the set) to train
      learningrate -- learning rate to be applied to the weight changes
      momentum -- momentum with which the weights change"""

        # exemplars: [([input list], [output list]), ([input list], [output list]), ...]
        if (types.ListType is not type(exemplars)):
            raise exceptions.NeuralException(exceptions.ERR_PARAMTYPE)

        # Train each exemplar 'epochs' times
        for i in xrange(epochs):
            self.sumsqerr = 0.0

            for ex in exemplars:
                self.train(ex, learningrate, momentum)

                # Accumulate the sum of squared errors over the output neurons
                for j in xrange(len(self.outputlayer.neurons)):
                    err = ex[1][j] - self.outputlayer.neurons[j].output
                    self.sumsqerr += err * err

            # Compute the root mean square error for the current epoch.
            self.rmserr = np.sqrt(
                self.sumsqerr / (len(exemplars) * len(self.outputlayer.output)))
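
The per-epoch error above reduces to the usual root-mean-square formula. The standalone sketch below recomputes it from plain lists of expected and actual output vectors; the function and variable names are hypothetical.

import math

# Hypothetical standalone sketch of the RMS error computed at the end of each epoch:
# squared differences are summed over every exemplar and every output value, then
# divided by (number of exemplars * number of outputs) before taking the square root.
def rms_error(expected_vectors, actual_vectors):
    sumsq = 0.0
    count = 0
    for expected, actual in zip(expected_vectors, actual_vectors):
        for e, a in zip(expected, actual):
            sumsq += (e - a) * (e - a)
            count += 1
    return math.sqrt(sumsq / count)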
Example #5
File: ANN.py Project: mk2908/WSD
    def activate(self):
        """Pass input on as output."""

        if (self.input is None):
            raise exceptions.NeuralException(exceptions.ERR_INPUTNONE)

        self.output = self.input
Example #6
File: ANN.py Project: mk2908/WSD
    def _seti(self, value):
        """Try to coerce whatever value is to a float."""

        try:
            self._input = float(value)

        except Exception, ex:
            raise exceptions.NeuralException(exceptions.ERR_INPUTTYPE, ex)
Example #7
File: ANN.py Project: mk2908/WSD
    def _seti(self, value):
        """Set input vector."""

        if (types.ListType is not type(value)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        if (len(value) != len(self.neurons)):  # value must contain one entry per neuron
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        for i in xrange(len(self.neurons)):
            if (types.FloatType is not type(value[i])):
                raise exceptions.NeuralException(exceptions.ERR_INPUTTYPE)

            self.neurons[i].input = value[i]

        self._input = value
Example #8
File: ANN.py Project: mk2908/WSD
    def _seti(self, value):  # Set the input value of each neuron in this layer
        """Set input vector."""

        if (types.ListType is not type(value)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        for i in xrange(len(self.neurons)):
            self.neurons[i].input = value

        self._input = value
Example #9
File: ANN.py Project: mk2908/WSD
    def _seti(self, value):
        """Set input vector, and compute the input sum that will be
      used by the transfer function to compute this neuron's output.
      The first time through the input vector is assumed to be the 
      correct size, and the weight vector is initialized to be the
      same size as the input vector with random values for the weights.
      Each time after the initial call, the weight and input vector 
      size is assumed fixed.
      
      value -- object on the RHS of the `=' operator"""

        if (types.ListType is not type(value)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        if (0 == len(value)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        # On the first assignment of input, set the weights to a random
        # initial value. Wait until we receive an input vector, because
        # otherwise there's no way of knowing how many weights are needed.
        # Trust the input vector to have the correct number. This is a
        # potential bug.
        if (self.weights is None):
            self.weightdeltas = [0.0] * len(value)
            self.weights = []
            for i in value:
                # Each weight W_i starts at a random value between -1 and 1.
                self.weights.append(random.uniform(-1, 1))

        # Make sure the input vector and the weights are the same size
        # vectors
        if (len(value) != len(self.weights)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        # Compute the input sum to the transfer function.
        self.inputsum = 0.0
        for i in xrange(len(self.weights)):  # accumulate input[i] * weight[i]
            try:
                self.inputsum += (float(value[i]) * self.weights[i])

            except Exception, ex:
                raise exceptions.NeuralException(exceptions.ERR_INDEX, ex)
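
The loop above computes a plain weighted sum over the input vector. A minimal standalone sketch of the same computation follows; all names here are hypothetical.

import random

# Hypothetical sketch of what _seti computes: one weight per input, initialised
# uniformly in [-1, 1], then inputsum = sum_i (value[i] * weights[i]).
def make_weights(n):
    return [random.uniform(-1, 1) for _ in range(n)]

def weighted_input_sum(value, weights):
    return sum(float(v) * w for v, w in zip(value, weights))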
Example #10
File: ANN.py Project: mk2908/WSD
    def computedelta(self, nextlayer, index):
        """Compute the amount of error at this node for training so that 
      it can later be minimized.
      
      nextlayer -- the layer to which the outputs of this neuron are 
                   sent (the layer below the one this neuron is in)
      index -- the index in the current layer of this neuron"""

        if ((nextlayer is None)):
            raise exceptions.NeuralException(exceptions.ERR_LAYERNONE)

        if ((index < 0) or (type(index) != types.IntType)):
            raise exceptions.NeuralException(exceptions.ERR_INDEX)

        sum = 0.0

        for neuron in nextlayer.neurons:  # sum = sum_k (W_kj * delta_k)
            try:
                sum += neuron.weights[index] * neuron.delta
            except Exception, ex:
                raise exceptions.NeuralException(exceptions.ERR_INDEX, ex)
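
The loop implements the standard back-propagated error term for a hidden neuron: the sum, over the next layer, of the weight leading from this neuron times that neuron's delta. A standalone sketch of the same sum is below; the names are hypothetical, and what computedelta does with the sum afterwards is not shown in this snippet.

# Hypothetical sketch of the hidden-neuron error sum computed above:
# sum_k (W_kj * delta_k), where j indexes this neuron and k runs over
# the neurons in the next (lower) layer.
def hidden_error_sum(nextlayer_neurons, index):
    total = 0.0
    for neuron in nextlayer_neurons:
        total += neuron.weights[index] * neuron.delta
    return total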
Example #11
File: ANN.py Project: mk2908/WSD
    def _feedforward(self, inputs):  # from input to output
        """Feed one input vector through the network and generate an ouput
      vector.
      
      inputs -- input vector"""

        if (types.ListType is not type(inputs)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        if (len(inputs) != len(self.inputlayer.neurons)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        if (0 == len(self.hiddenlayers)):
            # NOTE : In the current implementation this is actually an error
            # TODO : It should be fine to allow someone to create a simple perceptron
            #        How to deal with this?
            raise exceptions.NeuralWarning(exceptions.ERR_LAYERNOHID)

        # Feed through the input layer
        self.inputlayer.input = inputs
        self.inputlayer.feedforward()

        # Feed through the hidden layers
        # feedforward() is defined on the abstract layer class; it fires each neuron.
        for i in xrange(len(self.hiddenlayers)):
            if (i == 0):
                self.hiddenlayers[i].input = self.inputlayer.output
            else:
                self.hiddenlayers[i].input = self.hiddenlayers[i - 1].output

            self.hiddenlayers[i].feedforward()

        # And feed through the output layer
        self.outputlayer.input = self.hiddenlayers[-1].output  # last hidden layer
        self.outputlayer.feedforward()

        self.output = self.outputlayer.output
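
For illustration, a self-contained sketch of the same layer-by-layer forward pass using plain lists of weight vectors and a logistic activation; all names here are hypothetical and independent of the classes above.

import math

# Hypothetical self-contained forward pass: each layer is a list of weight
# vectors (one per neuron), and each neuron applies a logistic sigmoid to the
# weighted sum of the previous layer's outputs. Bias terms are omitted.
def feedforward(layers, inputs):
    outputs = inputs
    for layer in layers:
        outputs = [
            1.0 / (1.0 + math.exp(-sum(w * x for w, x in zip(weights, outputs))))
            for weights in layer
        ]
    return outputs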
Example #12
File: ANN.py Project: mk2908/WSD
    def updateweight(self, learningrate, momentum):
        """Update the weights for each input connection. 
      
      learningrate -- learning rate requested by the network
      momentum -- momentum requested by the network"""  # fraction of the previous weight change carried into the new one

        # Try to coerce the learning rate and momentum to a float
        try:
            learningrate = float(learningrate)
            momentum = float(momentum)

        except Exception, ex:
            raise exceptions.NeuralException(exceptions.ERR_PARAMTYPE, ex)
Example #13
File: ANN.py Project: mk2908/WSD
    def xload(self, filename):
        """Load a network from an AnnML XML file. This should be used
      instead of class.load().
      
      filename -- name of the file to load"""

        tempnetwork = ANN_loader.XMLNeuralLoader().load(filename)

        if (BackPropNet != type(tempnetwork)):
            raise exceptions.NeuralException(exceptions.ERR_NETWORKTYPE)
        else:
            self.inputlayer = tempnetwork.inputlayer
            self.hiddenlayers = tempnetwork.hiddenlayers
            self.outputlayer = tempnetwork.outputlayer
Example #14
File: ANN.py Project: mk2908/WSD
    def backpropagate(self, learningrate, momentum, expected):
        """Propagate the error from the output up through this layer and to 
      the previous one.
      
      learningrate -- learning rate to be applied to weight changes
      momentum -- momentum with which the weights change
      expected -- expected output vector"""

        if (types.ListType is not type(expected)):
            raise exceptions.NeuralException(exceptions.ERR_VECTOR)

        for i in xrange(len(self.neurons)):
            # Each output neuron computes its delta from the expected value and its own output.
            self.neurons[i].computedelta(expected[i])
            self.neurons[i].updateweight(learningrate, momentum)
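
The body of the output neuron's computedelta(expected[i]) is not shown in this listing; for a logistic output neuron the conventional delta is (expected - output) * output * (1 - output). A hedged sketch under that assumption:

# Hypothetical sketch of an output-neuron delta, assuming the usual logistic
# (sigmoid) activation; the actual computedelta body is not shown in this listing.
def output_delta(expected, output):
    return (expected - output) * output * (1.0 - output)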
Example #15
File: ANN.py Project: mk2908/WSD
    def updateweight(self, learningrate, momentum):
        """Update the weights for each input connection. 
      
      learningrate -- learning rate requested by the network
      momentum -- momentum requested by the network"""  # fraction of the previous weight change carried into the new one

        # Try to coerce the learning rate and momentum to a float
        try:
            learningrate = float(learningrate)
            momentum = float(momentum)

        except Exception, ex:
            raise exceptions.NeuralException(exceptions.ERR_PARAMTYPE, ex)

        if (type(learningrate) != types.FloatType):
            raise exceptions.NeuralException(exceptions.ERR_PARAMTYPE)

        if (type(momentum) != types.FloatType):
            raise exceptions.NeuralException(exceptions.ERR_PARAMTYPE)

        if (self.delta is None):
            raise exceptions.NeuralException(exceptions.ERR_DELTANONE)

        # Update each weight for this neuron.
        for i in xrange(len(self.weights)):  # delta_W_i = eta * x_i * delta_j + m * delta_W_i(prev)
            self.weightdeltas[i] = ((learningrate * self.input[i] * self.delta)
                                    + (momentum * self.weightdeltas[i]))
            self.weights[i] += self.weightdeltas[i]

    def _seti(self, value):
        """Set input vector, and compute the input sum that will be