def LinearLearner(dataset, learning_rate=0.01, epochs=100):
    """Define with learner = LinearLearner(data); infer with learner(x).

    Trains a linear model by batch gradient descent and returns a
    predict(example) callable.
    """
    idx_i = dataset.inputs
    idx_t = dataset.target  # As of now, dataset.target gives only one index.
    examples = dataset.examples
    num_examples = len(examples)

    # Columns of the transposed design matrix, preceded by a dummy column
    # of ones so that w[0] acts as the bias weight.
    X_col = [dataset.values[i] for i in idx_i]
    X_col.insert(0, [1] * num_examples)

    # Start from small random weights: one per input attribute, plus the bias.
    w = random_weights(min_value=-0.5, max_value=0.5, num_weights=len(idx_i) + 1)

    for _ in range(epochs):
        # Residual t - y for every training example.
        err = []
        for example in examples:
            prediction = dotproduct(w, [1] + example)
            err.append(example[idx_t] - prediction)

        # Batch gradient-descent step on each weight.
        for i in range(len(w)):
            w[i] += learning_rate * (dotproduct(err, X_col[i]) / num_examples)

    def predict(example):
        """Output of the trained linear model for one example."""
        return dotproduct(w, [1] + example)

    return predict
def __init__(self, in_size=3, out_size=3, activation=None):
    """Set up the layer: size bookkeeping, activation, and random weights."""
    super().__init__(out_size)
    self.out_size = out_size
    self.inputs = None
    # Fall back to a sigmoid activation when none is supplied.
    self.activation = activation or Sigmoid()
    # Give every node its own fresh set of small random input weights.
    for node in self.nodes:
        node.weights = random_weights(-0.5, 0.5, in_size)
def LogisticLinearLeaner(dataset, learning_rate=0.01, epochs=100):
    """
    [Section 18.6.5]
    Linear classifier with logistic regression.

    Trains by batch gradient descent and returns predict(example),
    which outputs the sigmoid of the learned linear combination.
    """
    idx_i = dataset.inputs
    idx_t = dataset.target
    examples = dataset.examples
    num_examples = len(examples)

    # X transpose: vertical columns of X, with a dummy column of ones
    # prepended so w[0] serves as the bias weight.
    X_col = [dataset.values[i] for i in idx_i]
    ones = [1 for _ in range(len(examples))]
    X_col = [ones] + X_col

    # Initialize random weights: one per input attribute, plus the bias.
    num_weights = len(idx_i) + 1
    w = random_weights(min_value=-0.5, max_value=0.5, num_weights=num_weights)

    for epoch in range(epochs):
        err = []
        h = []
        # Pass over all examples, recording residuals and sigmoid slopes.
        for example in examples:
            x = [1] + example
            y = sigmoid(dot_product(w, x))
            h.append(sigmoid_derivative(y))
            t = example[idx_t]
            err.append(t - y)

        # Update weights. The residual-times-slope vector is identical for
        # every weight, so build it once per epoch rather than once per weight
        # (the original rebuilt it inside the loop below).
        buffer = [x * y for x, y in zip(err, h)]
        for i in range(len(w)):
            w[i] = w[i] + learning_rate * (dot_product(buffer, X_col[i]) / num_examples)

    def predict(example):
        """Sigmoid output of the trained linear model for one example."""
        x = [1] + example
        return sigmoid(dot_product(w, x))

    return predict
def LogisticLinearLeaner(dataset, learning_rate=0.01, epochs=100):
    """Define logistic regression classifier in 18.6.5.

    Trains by batch gradient descent and returns predict(example),
    which outputs the sigmoid of the learned linear combination.
    """
    idx_i = dataset.inputs
    idx_t = dataset.target  # dataset.target gives a single index.
    examples = dataset.examples
    num_examples = len(examples)

    # X transpose: vertical columns of X, with a dummy column of ones
    # prepended so w[0] serves as the bias weight.
    X_col = [dataset.values[i] for i in idx_i]
    ones = [1 for _ in range(len(examples))]
    X_col = [ones] + X_col

    # Initialize random weights: one per input attribute, plus the bias.
    num_weights = len(idx_i) + 1
    w = random_weights(min_value=-0.5, max_value=0.5, num_weights=num_weights)

    for epoch in range(epochs):
        err = []
        h = []
        # Pass over all examples, recording residuals and sigmoid slopes.
        for example in examples:
            x = [1] + example
            y = 1 / (1 + math.exp(-dotproduct(w, x)))
            h.append(y * (1 - y))  # derivative of the sigmoid at output y
            t = example[idx_t]
            err.append(t - y)

        # Update weights. The residual-times-slope vector is identical for
        # every weight, so build it once per epoch rather than once per weight;
        # the commented-out legacy update line has been removed.
        buffer = [x * y for x, y in zip(err, h)]
        for i in range(len(w)):
            w[i] = w[i] + learning_rate * (dotproduct(buffer, X_col[i]) / num_examples)

    def predict(example):
        """Sigmoid output of the trained linear model for one example."""
        x = [1] + example
        return 1 / (1 + math.exp(-dotproduct(w, x)))

    return predict
def LinearLearner(dataset, learning_rate=0.01, epochs=100):
    """Linear classifier trained by batch gradient descent [Section 18.6.4].

    Returns predict(example), the learned linear function.
    """
    idx_i = dataset.inputs
    idx_t = dataset.target
    examples = dataset.examples
    num_examples = len(examples)

    # Columns of the transposed data matrix, preceded by a bias column of ones.
    X_col = [[1] * num_examples] + [dataset.values[i] for i in idx_i]

    # One random starting weight per input attribute, plus the bias weight.
    w = random_weights(min_value=-0.5, max_value=0.5, num_weights=len(idx_i) + 1)

    for _ in range(epochs):
        # Residual t - y for every training example.
        err = [example[idx_t] - dot_product(w, [1] + example) for example in examples]
        # Batch gradient-descent update for each weight.
        for i in range(len(w)):
            w[i] += learning_rate * (dot_product(err, X_col[i]) / num_examples)

    def predict(example):
        """Output of the trained linear model for one example."""
        return dot_product(w, [1] + example)

    return predict