class Perceptron(object):
    """Online-learning perceptron wrapping a single Neuron.

    Weights start at zero and are corrected after every misclassified
    example (the classic perceptron learning rule).
    """

    def __init__(self, input_size, lrn_rate=1):
        """'input_size' is the length of the input.
        'lrn_rate' is the learning rate.
        """
        # 'signal' is the activation function, defined elsewhere in
        # this module (presumably a sign/step function — not visible here).
        self.neuron = Neuron([0] * input_size, 0, signal)
        self.lrn_rate = lrn_rate
        # Expose the neuron's fire() directly as this object's fire().
        self.fire = self.neuron.fire

    def training(self, examples, max_epochs=None):
        """Train on (input_vector, desired_output) pairs until convergence.

        'examples' is a re-iterable collection of
        (input_vector, desired_output) pairs.
        'max_epochs', when given, bounds the number of passes over the
        data; without it the loop never terminates on data that is not
        linearly separable (the original behavior, kept as the default).

        Returns the number of epochs needed to converge, or False if
        'max_epochs' passes elapse without convergence.
        """
        epochs = 0
        while True:
            epochs += 1
            error_count = 0
            for input_vector, desired_output in examples:
                error = desired_output - self.neuron.fire(input_vector)
                if error != 0:
                    # Scale the correction by the learning rate.
                    self.neuron.update(input_vector, self.lrn_rate * error)
                    error_count += 1
            if error_count == 0:
                # A full pass with no mistakes: converged.
                return epochs
            # Guard against non-separable data looping forever.
            if max_epochs is not None and epochs >= max_epochs:
                return False

    def __str__(self):
        ret = 'lrn_rate: %s' % self.lrn_rate
        ret = '%s\n%s' % (ret, self.neuron.__str__())
        return ret
class Perceptron(object):
    """A perceptron that learns online from labelled examples."""

    def __init__(self, input_size, lrn_rate=1):
        """'input_size' is the length of the input.
        'lrn_rate' is the learning rate.
        """
        # Zero initial weights, zero bias, 'signal' as activation
        # (both Neuron and signal are defined elsewhere in this module).
        self.neuron = Neuron([0 for _ in range(input_size)], 0, signal)
        self.lrn_rate = lrn_rate
        self.fire = self.neuron.fire

    def training(self, examples):
        """Sweep over 'examples' repeatedly, correcting the neuron on
        every misclassified (input, target) pair, until one full pass
        makes no mistakes. Returns the number of passes that took.
        """
        epochs = 0
        converged = False
        while not converged:
            epochs += 1
            mistakes = 0
            for sample, target in examples:
                delta = target - self.neuron.fire(sample)
                if delta != 0:
                    self.neuron.update(sample, self.lrn_rate * delta)
                    mistakes += 1
            converged = mistakes == 0
        return epochs

    def __str__(self):
        return 'lrn_rate: %s\n%s' % (self.lrn_rate, self.neuron)
class Perceptron(object):
    """Online learning Perceptron."""

    def __init__(self, input_size, lrn_rate=1, activation=signal):
        """'input_size' is the length of the input.
        'lrn_rate' is the learning rate.
        'activation' is the neuron's activation function
        (defaults to 'signal', defined elsewhere in this module).
        """
        self.neuron = Neuron([0] * input_size, 0, activation)
        self.lrn_rate = lrn_rate
        # Expose the neuron's fire() directly on this object.
        self.fire = self.neuron.fire

    def training(self, inputs_vector, outputs, max_epochs=None):
        """Train until convergence or until 'max_epochs' epochs elapse.

        'inputs_vector' and 'outputs' are parallel sequences; lengths
        are not checked — zip() silently truncates to the shorter one.
        'max_epochs' bounds the number of passes when truthy; it now
        defaults to None so callers may omit it (backward compatible —
        positional callers are unaffected and None is falsy, matching
        the existing guard).

        Returns the number of epochs on convergence, or False if the
        bound is reached first.
        """
        epochs = 0
        while True:
            epochs += 1
            error_count = 0
            for inputs, output in zip(inputs_vector, outputs):
                error = output - self.fire(inputs)
                if error != 0:
                    self.neuron.update(inputs, self.lrn_rate * error)
                    error_count += 1
            if error_count == 0:
                return epochs
            # Bug fix: was 'epochs > max_epochs', which ran one full
            # epoch beyond the requested bound before giving up.
            if max_epochs and epochs >= max_epochs:
                return False

    def __str__(self):
        ret = 'lrn_rate: %s' % self.lrn_rate
        ret = '%s\n%s' % (ret, self.neuron.__str__())
        return ret
class Perceptron(object):
    """Single-neuron perceptron trained online, one example at a time."""

    def __init__(self, input_size, lrn_rate=1, activation=signal):
        """'input_size' is the length of the input.
        'lrn_rate' is the learning rate.
        'activation' is the activation function handed to the Neuron.
        """
        weights = [0] * input_size
        self.neuron = Neuron(weights, 0, activation)
        self.lrn_rate = lrn_rate
        self.fire = self.neuron.fire

    def training(self, inputs_vector, outputs, max_epochs):
        """Repeatedly sweep the training data, correcting the neuron on
        each mistake. Input and output lengths are not checked; zip()
        truncates to the shorter. Returns the epoch count on
        convergence, or False once 'max_epochs' (when truthy) has been
        exceeded without converging.
        """
        epoch = 0
        while True:
            epoch += 1
            mistakes = 0
            for sample, target in zip(inputs_vector, outputs):
                delta = target - self.fire(sample)
                if delta != 0:
                    self.neuron.update(sample, self.lrn_rate * delta)
                    mistakes += 1
            if mistakes == 0:
                return epoch
            if max_epochs and epoch > max_epochs:
                return False

    def __str__(self):
        return 'lrn_rate: %s\n%s' % (self.lrn_rate, self.neuron)